[official-gcc.git] / gcc / loop-iv.c
1 /* Rtl-level induction variable analysis.
2 Copyright (C) 2004 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published by the
8 Free Software Foundation; either version 2, or (at your option) any
9 later version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT
12 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to the Free
18 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
19 02111-1307, USA. */
21 /* This is just a very simplistic analysis of induction variables of the loop.
22 The major use is for determining the number of iterations of a loop for
23 loop unrolling, doloop optimization and branch prediction. For this we
24 are only interested in bivs and a fairly limited set of givs that are
25 needed in the exit condition. We also only compute the iv information on
26 demand.
28 The interesting registers are determined. A register is interesting if
30 -- it is set only in the blocks that dominate the latch of the current loop
31 -- all its sets are simple -- i.e. in the form we understand
 33 We also number the insns sequentially in each basic block.  For a use of an
 34 interesting reg, it is then easy to find a reaching definition (there is at
 35 most one).
 37 An induction variable is then analyzed simply by walking the use-def
 38 chains.
40 Usage:
42 iv_analysis_loop_init (loop);
43 insn = iv_get_reaching_def (where, reg);
44 if (iv_analyze (insn, reg, &iv))
46 ...
48 iv_analysis_done (); */
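/* For illustration, a minimal caller of the interface above, mirroring the
   usage comment.  The function below is a hypothetical sketch and is not part
   of this file (in a real file it would follow the definitions below), but
   the iv_* routines and struct rtx_iv are the ones defined here.  */

static bool
example_reg_is_simple_iv (struct loop *loop, rtx where, rtx reg)
{
  struct rtx_iv iv;
  rtx def_insn;
  bool ret;

  iv_analysis_loop_init (loop);
  def_insn = iv_get_reaching_def (where, reg);
  ret = iv_analyze (def_insn, reg, &iv);

  /* On success, IV describes the value of REG at WHERE as
     delta + mult * extend (subreg (base + step * iteration)).  */

  iv_analysis_done ();
  return ret;
}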
50 #include "config.h"
51 #include "system.h"
52 #include "coretypes.h"
53 #include "tm.h"
54 #include "rtl.h"
55 #include "hard-reg-set.h"
56 #include "basic-block.h"
57 #include "cfgloop.h"
58 #include "expr.h"
59 #include "output.h"
61 /* The insn information. */
63 struct insn_info
65 /* Id of the insn. */
66 unsigned luid;
68 /* The previous definition of the register defined by the single
69 set in the insn. */
70 rtx prev_def;
72 /* The description of the iv. */
73 struct rtx_iv iv;
76 static struct insn_info *insn_info;
 78 /* The last definition of each register.  */
80 static rtx *last_def;
82 /* The bivs. */
84 static struct rtx_iv *bivs;
 86 /* Maximal insn number for which there is room in the insn_info array.  */
88 static unsigned max_insn_no;
 90 /* Maximal register number for which there is room in the bivs and last_def
 91 arrays.  */
93 static unsigned max_reg_no;
95 /* Dumps information about IV to FILE. */
97 extern void dump_iv_info (FILE *, struct rtx_iv *);
98 void
99 dump_iv_info (FILE *file, struct rtx_iv *iv)
101 if (!iv->base)
103 fprintf (file, "not simple");
104 return;
107 if (iv->step == const0_rtx
108 && !iv->first_special)
109 fprintf (file, "invariant ");
111 print_rtl (file, iv->base);
112 if (iv->step != const0_rtx)
114 fprintf (file, " + ");
115 print_rtl (file, iv->step);
116 fprintf (file, " * iteration");
118 fprintf (file, " (in %s)", GET_MODE_NAME (iv->mode));
120 if (iv->mode != iv->extend_mode)
121 fprintf (file, " %s to %s",
122 rtx_name[iv->extend],
123 GET_MODE_NAME (iv->extend_mode));
125 if (iv->mult != const1_rtx)
127 fprintf (file, " * ");
128 print_rtl (file, iv->mult);
130 if (iv->delta != const0_rtx)
132 fprintf (file, " + ");
133 print_rtl (file, iv->delta);
135 if (iv->first_special)
136 fprintf (file, " (first special)");
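/* As a made-up example of the output format above: for an iv with base
   (const_int 0), step (const_int 1), mode HI, extend SIGN_EXTEND,
   extend_mode SI and mult (const_int 4), dump_iv_info prints

     (const_int 0) + (const_int 1) * iteration (in HI) sign_extend to SI * (const_int 4)  */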
139 /* Assigns luids to insns in basic block BB. */
141 static void
142 assign_luids (basic_block bb)
144 unsigned i = 0, uid;
145 rtx insn;
147 FOR_BB_INSNS (bb, insn)
149 uid = INSN_UID (insn);
150 insn_info[uid].luid = i++;
151 insn_info[uid].prev_def = NULL_RTX;
152 insn_info[uid].iv.analysed = false;
156 /* Generates a subreg to get the least significant part of EXPR (in mode
157 INNER_MODE) to OUTER_MODE. */
 159 static rtx
 160 lowpart_subreg (enum machine_mode outer_mode, rtx expr,
161 enum machine_mode inner_mode)
163 return simplify_gen_subreg (outer_mode, expr, inner_mode,
164 subreg_lowpart_offset (outer_mode, inner_mode));
167 /* Checks whether REG is a well-behaved register. */
169 static bool
170 simple_reg_p (rtx reg)
172 unsigned r;
174 if (GET_CODE (reg) == SUBREG)
176 if (!subreg_lowpart_p (reg))
177 return false;
178 reg = SUBREG_REG (reg);
181 if (!REG_P (reg))
182 return false;
184 r = REGNO (reg);
185 if (HARD_REGISTER_NUM_P (r))
186 return false;
188 if (GET_MODE_CLASS (GET_MODE (reg)) != MODE_INT)
189 return false;
191 if (last_def[r] == const0_rtx)
192 return false;
194 return true;
197 /* Checks whether assignment LHS = RHS is simple enough for us to process. */
199 static bool
200 simple_set_p (rtx lhs, rtx rhs)
202 rtx op0, op1;
204 if (!REG_P (lhs)
205 || !simple_reg_p (lhs))
206 return false;
208 if (CONSTANT_P (rhs))
209 return true;
211 switch (GET_CODE (rhs))
213 case SUBREG:
214 case REG:
215 return simple_reg_p (rhs);
217 case SIGN_EXTEND:
218 case ZERO_EXTEND:
219 case NEG:
220 return simple_reg_p (XEXP (rhs, 0));
222 case PLUS:
223 case MINUS:
224 case MULT:
225 case ASHIFT:
226 op0 = XEXP (rhs, 0);
227 op1 = XEXP (rhs, 1);
229 if (!simple_reg_p (op0)
230 && !CONSTANT_P (op0))
231 return false;
233 if (!simple_reg_p (op1)
234 && !CONSTANT_P (op1))
235 return false;
237 if (GET_CODE (rhs) == MULT
238 && !CONSTANT_P (op0)
239 && !CONSTANT_P (op1))
240 return false;
242 if (GET_CODE (rhs) == ASHIFT
243 && CONSTANT_P (op0))
244 return false;
246 return true;
248 default:
249 return false;
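/* For illustration (the register numbers are made up), the predicate above
   accepts sets such as

     (set (reg:SI 100) (const_int 4))
     (set (reg:SI 100) (plus:SI (reg:SI 100) (const_int 4)))
     (set (reg:DI 101) (sign_extend:DI (reg:SI 100)))
     (set (reg:SI 102) (mult:SI (reg:SI 100) (const_int 4)))

   and rejects e.g. a multiplication of two registers, a shift of a constant
   by a register, and any set of a hard register or of a non-integer mode.  */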
253 /* Mark single SET in INSN. */
255 static rtx
256 mark_single_set (rtx insn, rtx set)
258 rtx def = SET_DEST (set), src;
259 unsigned regno, uid;
261 src = find_reg_equal_equiv_note (insn);
262 if (src)
263 src = XEXP (src, 0);
264 else
265 src = SET_SRC (set);
267 if (!simple_set_p (SET_DEST (set), src))
268 return NULL_RTX;
270 regno = REGNO (def);
271 uid = INSN_UID (insn);
273 bivs[regno].analysed = false;
274 insn_info[uid].prev_def = last_def[regno];
275 last_def[regno] = insn;
277 return def;
280 /* Invalidate register REG unless it is equal to EXCEPT. */
282 static void
283 kill_sets (rtx reg, rtx by ATTRIBUTE_UNUSED, void *except)
285 if (GET_CODE (reg) == SUBREG)
286 reg = SUBREG_REG (reg);
287 if (!REG_P (reg))
288 return;
289 if (reg == except)
290 return;
292 last_def[REGNO (reg)] = const0_rtx;
295 /* Marks sets in basic block BB. If DOM is true, BB dominates the loop
296 latch. */
298 static void
299 mark_sets (basic_block bb, bool dom)
301 rtx insn, set, def;
303 FOR_BB_INSNS (bb, insn)
305 if (!INSN_P (insn))
306 continue;
308 if (dom
309 && (set = single_set (insn)))
310 def = mark_single_set (insn, set);
311 else
312 def = NULL_RTX;
314 note_stores (PATTERN (insn), kill_sets, def);
318 /* Prepare the data for an induction variable analysis of a LOOP. */
320 void
321 iv_analysis_loop_init (struct loop *loop)
323 basic_block *body = get_loop_body_in_dom_order (loop);
324 unsigned b;
326 if ((unsigned) get_max_uid () >= max_insn_no)
328 /* Add some reserve for insns and registers produced in optimizations. */
329 max_insn_no = get_max_uid () + 100;
330 if (insn_info)
331 free (insn_info);
332 insn_info = xmalloc (max_insn_no * sizeof (struct insn_info));
335 if ((unsigned) max_reg_num () >= max_reg_no)
337 max_reg_no = max_reg_num () + 100;
338 if (last_def)
339 free (last_def);
340 last_def = xmalloc (max_reg_no * sizeof (rtx));
341 if (bivs)
342 free (bivs);
343 bivs = xmalloc (max_reg_no * sizeof (struct rtx_iv));
346 memset (last_def, 0, max_reg_num () * sizeof (rtx));
348 for (b = 0; b < loop->num_nodes; b++)
350 assign_luids (body[b]);
351 mark_sets (body[b], just_once_each_iteration_p (loop, body[b]));
354 free (body);
357 /* Gets definition of REG reaching the INSN. If REG is not simple, const0_rtx
358 is returned. If INSN is before the first def in the loop, NULL_RTX is
359 returned. */
 361 rtx
 362 iv_get_reaching_def (rtx insn, rtx reg)
364 unsigned regno, luid, auid;
365 rtx ainsn;
366 basic_block bb, abb;
368 if (GET_CODE (reg) == SUBREG)
370 if (!subreg_lowpart_p (reg))
371 return const0_rtx;
372 reg = SUBREG_REG (reg);
374 if (!REG_P (reg))
375 return NULL_RTX;
377 regno = REGNO (reg);
378 if (!last_def[regno]
379 || last_def[regno] == const0_rtx)
380 return last_def[regno];
382 bb = BLOCK_FOR_INSN (insn);
383 luid = insn_info[INSN_UID (insn)].luid;
385 ainsn = last_def[regno];
386 while (1)
388 abb = BLOCK_FOR_INSN (ainsn);
390 if (dominated_by_p (CDI_DOMINATORS, bb, abb))
391 break;
393 auid = INSN_UID (ainsn);
394 ainsn = insn_info[auid].prev_def;
396 if (!ainsn)
397 return NULL_RTX;
400 while (1)
402 abb = BLOCK_FOR_INSN (ainsn);
403 if (abb != bb)
404 return ainsn;
406 auid = INSN_UID (ainsn);
407 if (luid > insn_info[auid].luid)
408 return ainsn;
410 ainsn = insn_info[auid].prev_def;
411 if (!ainsn)
412 return NULL_RTX;
416 /* Sets IV to invariant CST in MODE. Always returns true (just for
417 consistency with other iv manipulation functions that may fail). */
419 static bool
420 iv_constant (struct rtx_iv *iv, rtx cst, enum machine_mode mode)
422 if (mode == VOIDmode)
423 mode = GET_MODE (cst);
425 iv->analysed = true;
426 iv->mode = mode;
427 iv->base = cst;
428 iv->step = const0_rtx;
429 iv->first_special = false;
430 iv->extend = UNKNOWN;
431 iv->extend_mode = iv->mode;
432 iv->delta = const0_rtx;
433 iv->mult = const1_rtx;
435 return true;
438 /* Evaluates application of subreg to MODE on IV. */
440 static bool
441 iv_subreg (struct rtx_iv *iv, enum machine_mode mode)
443 /* If iv is invariant, just calculate the new value. */
444 if (iv->step == const0_rtx
445 && !iv->first_special)
447 rtx val = get_iv_value (iv, const0_rtx);
448 val = lowpart_subreg (mode, val, iv->extend_mode);
450 iv->base = val;
451 iv->extend = UNKNOWN;
452 iv->mode = iv->extend_mode = mode;
453 iv->delta = const0_rtx;
454 iv->mult = const1_rtx;
455 return true;
458 if (iv->extend_mode == mode)
459 return true;
461 if (GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (iv->mode))
462 return false;
464 iv->extend = UNKNOWN;
465 iv->mode = mode;
467 iv->base = simplify_gen_binary (PLUS, iv->extend_mode, iv->delta,
468 simplify_gen_binary (MULT, iv->extend_mode,
469 iv->base, iv->mult));
470 iv->step = simplify_gen_binary (MULT, iv->extend_mode, iv->step, iv->mult);
471 iv->mult = const1_rtx;
472 iv->delta = const0_rtx;
473 iv->first_special = false;
475 return true;
478 /* Evaluates application of EXTEND to MODE on IV. */
480 static bool
481 iv_extend (struct rtx_iv *iv, enum rtx_code extend, enum machine_mode mode)
483 /* If iv is invariant, just calculate the new value. */
484 if (iv->step == const0_rtx
485 && !iv->first_special)
487 rtx val = get_iv_value (iv, const0_rtx);
488 val = simplify_gen_unary (extend, mode, val, iv->extend_mode);
490 iv->base = val;
491 iv->extend = UNKNOWN;
492 iv->mode = iv->extend_mode = mode;
493 iv->delta = const0_rtx;
494 iv->mult = const1_rtx;
495 return true;
498 if (mode != iv->extend_mode)
499 return false;
501 if (iv->extend != UNKNOWN
502 && iv->extend != extend)
503 return false;
505 iv->extend = extend;
507 return true;
510 /* Evaluates negation of IV. */
512 static bool
513 iv_neg (struct rtx_iv *iv)
515 if (iv->extend == UNKNOWN)
517 iv->base = simplify_gen_unary (NEG, iv->extend_mode,
518 iv->base, iv->extend_mode);
519 iv->step = simplify_gen_unary (NEG, iv->extend_mode,
520 iv->step, iv->extend_mode);
522 else
524 iv->delta = simplify_gen_unary (NEG, iv->extend_mode,
525 iv->delta, iv->extend_mode);
526 iv->mult = simplify_gen_unary (NEG, iv->extend_mode,
527 iv->mult, iv->extend_mode);
530 return true;
533 /* Evaluates addition or subtraction (according to OP) of IV1 to IV0. */
535 static bool
536 iv_add (struct rtx_iv *iv0, struct rtx_iv *iv1, enum rtx_code op)
538 enum machine_mode mode;
539 rtx arg;
541 /* Extend the constant to extend_mode of the other operand if necessary. */
542 if (iv0->extend == UNKNOWN
543 && iv0->mode == iv0->extend_mode
544 && iv0->step == const0_rtx
545 && GET_MODE_SIZE (iv0->extend_mode) < GET_MODE_SIZE (iv1->extend_mode))
547 iv0->extend_mode = iv1->extend_mode;
548 iv0->base = simplify_gen_unary (ZERO_EXTEND, iv0->extend_mode,
549 iv0->base, iv0->mode);
551 if (iv1->extend == UNKNOWN
552 && iv1->mode == iv1->extend_mode
553 && iv1->step == const0_rtx
554 && GET_MODE_SIZE (iv1->extend_mode) < GET_MODE_SIZE (iv0->extend_mode))
556 iv1->extend_mode = iv0->extend_mode;
557 iv1->base = simplify_gen_unary (ZERO_EXTEND, iv1->extend_mode,
558 iv1->base, iv1->mode);
561 mode = iv0->extend_mode;
562 if (mode != iv1->extend_mode)
563 return false;
565 if (iv0->extend == UNKNOWN && iv1->extend == UNKNOWN)
567 if (iv0->mode != iv1->mode)
568 return false;
570 iv0->base = simplify_gen_binary (op, mode, iv0->base, iv1->base);
571 iv0->step = simplify_gen_binary (op, mode, iv0->step, iv1->step);
573 return true;
576 /* Handle addition of constant. */
577 if (iv1->extend == UNKNOWN
578 && iv1->mode == mode
579 && iv1->step == const0_rtx)
581 iv0->delta = simplify_gen_binary (op, mode, iv0->delta, iv1->base);
582 return true;
585 if (iv0->extend == UNKNOWN
586 && iv0->mode == mode
587 && iv0->step == const0_rtx)
589 arg = iv0->base;
590 *iv0 = *iv1;
591 if (op == MINUS
592 && !iv_neg (iv0))
593 return false;
595 iv0->delta = simplify_gen_binary (PLUS, mode, iv0->delta, arg);
596 return true;
599 return false;
 602 /* Evaluates multiplication of IV by the constant MBY.  */
604 static bool
605 iv_mult (struct rtx_iv *iv, rtx mby)
607 enum machine_mode mode = iv->extend_mode;
609 if (GET_MODE (mby) != VOIDmode
610 && GET_MODE (mby) != mode)
611 return false;
613 if (iv->extend == UNKNOWN)
615 iv->base = simplify_gen_binary (MULT, mode, iv->base, mby);
616 iv->step = simplify_gen_binary (MULT, mode, iv->step, mby);
618 else
620 iv->delta = simplify_gen_binary (MULT, mode, iv->delta, mby);
621 iv->mult = simplify_gen_binary (MULT, mode, iv->mult, mby);
624 return true;
 627 /* Evaluates shift of IV by the constant MBY.  */
629 static bool
630 iv_shift (struct rtx_iv *iv, rtx mby)
632 enum machine_mode mode = iv->extend_mode;
634 if (GET_MODE (mby) != VOIDmode
635 && GET_MODE (mby) != mode)
636 return false;
638 if (iv->extend == UNKNOWN)
640 iv->base = simplify_gen_binary (ASHIFT, mode, iv->base, mby);
641 iv->step = simplify_gen_binary (ASHIFT, mode, iv->step, mby);
643 else
645 iv->delta = simplify_gen_binary (ASHIFT, mode, iv->delta, mby);
646 iv->mult = simplify_gen_binary (ASHIFT, mode, iv->mult, mby);
649 return true;
 652 /* The recursive part of get_biv_step.  Gets the value of the register
 653 defined by the single set in INSN with respect to the initial value of REG
 654 inside the loop, in the shape described at get_biv_step.  */
656 static bool
657 get_biv_step_1 (rtx insn, rtx reg,
658 rtx *inner_step, enum machine_mode *inner_mode,
659 enum rtx_code *extend, enum machine_mode outer_mode,
660 rtx *outer_step)
662 rtx set, lhs, rhs, op0 = NULL_RTX, op1 = NULL_RTX;
663 rtx next, nextr, def_insn, tmp;
664 enum rtx_code code;
666 set = single_set (insn);
667 rhs = find_reg_equal_equiv_note (insn);
668 if (rhs)
669 rhs = XEXP (rhs, 0);
670 else
671 rhs = SET_SRC (set);
672 lhs = SET_DEST (set);
674 code = GET_CODE (rhs);
675 switch (code)
677 case SUBREG:
678 case REG:
679 next = rhs;
680 break;
682 case PLUS:
683 case MINUS:
684 op0 = XEXP (rhs, 0);
685 op1 = XEXP (rhs, 1);
687 if (code == PLUS && CONSTANT_P (op0))
689 tmp = op0; op0 = op1; op1 = tmp;
692 if (!simple_reg_p (op0)
693 || !CONSTANT_P (op1))
694 return false;
696 if (GET_MODE (rhs) != outer_mode)
698 /* ppc64 uses expressions like
700 (set x:SI (plus:SI (subreg:SI y:DI) 1)).
 702 This is equivalent to
 704 (set x':DI (plus:DI y:DI 1))
 705 (set x:SI (subreg:SI x':DI)).  */
706 if (GET_CODE (op0) != SUBREG)
707 return false;
708 if (GET_MODE (SUBREG_REG (op0)) != outer_mode)
709 return false;
712 next = op0;
713 break;
715 case SIGN_EXTEND:
716 case ZERO_EXTEND:
717 if (GET_MODE (rhs) != outer_mode)
718 return false;
720 op0 = XEXP (rhs, 0);
721 if (!simple_reg_p (op0))
722 return false;
724 next = op0;
725 break;
727 default:
728 return false;
731 if (GET_CODE (next) == SUBREG)
733 if (!subreg_lowpart_p (next))
734 return false;
736 nextr = SUBREG_REG (next);
737 if (GET_MODE (nextr) != outer_mode)
738 return false;
740 else
741 nextr = next;
743 def_insn = iv_get_reaching_def (insn, nextr);
744 if (def_insn == const0_rtx)
745 return false;
747 if (!def_insn)
749 if (!rtx_equal_p (nextr, reg))
750 return false;
752 *inner_step = const0_rtx;
753 *extend = UNKNOWN;
754 *inner_mode = outer_mode;
755 *outer_step = const0_rtx;
757 else if (!get_biv_step_1 (def_insn, reg,
758 inner_step, inner_mode, extend, outer_mode,
759 outer_step))
760 return false;
762 if (GET_CODE (next) == SUBREG)
764 enum machine_mode amode = GET_MODE (next);
766 if (GET_MODE_SIZE (amode) > GET_MODE_SIZE (*inner_mode))
767 return false;
769 *inner_mode = amode;
770 *inner_step = simplify_gen_binary (PLUS, outer_mode,
771 *inner_step, *outer_step);
772 *outer_step = const0_rtx;
773 *extend = UNKNOWN;
776 switch (code)
778 case REG:
779 case SUBREG:
780 break;
782 case PLUS:
783 case MINUS:
784 if (*inner_mode == outer_mode
785 /* See comment in previous switch. */
786 || GET_MODE (rhs) != outer_mode)
787 *inner_step = simplify_gen_binary (code, outer_mode,
788 *inner_step, op1);
789 else
790 *outer_step = simplify_gen_binary (code, outer_mode,
791 *outer_step, op1);
792 break;
794 case SIGN_EXTEND:
795 case ZERO_EXTEND:
796 if (GET_MODE (op0) != *inner_mode
797 || *extend != UNKNOWN
798 || *outer_step != const0_rtx)
799 abort ();
801 *extend = code;
802 break;
804 default:
805 abort ();
808 return true;
811 /* Gets the operation on register REG inside loop, in shape
813 OUTER_STEP + EXTEND_{OUTER_MODE} (SUBREG_{INNER_MODE} (REG + INNER_STEP))
815 If the operation cannot be described in this shape, return false. */
817 static bool
818 get_biv_step (rtx reg, rtx *inner_step, enum machine_mode *inner_mode,
819 enum rtx_code *extend, enum machine_mode *outer_mode,
820 rtx *outer_step)
822 *outer_mode = GET_MODE (reg);
824 if (!get_biv_step_1 (last_def[REGNO (reg)], reg,
825 inner_step, inner_mode, extend, *outer_mode,
826 outer_step))
827 return false;
829 if (*inner_mode != *outer_mode
830 && *extend == UNKNOWN)
831 abort ();
833 if (*inner_mode == *outer_mode
834 && *extend != UNKNOWN)
835 abort ();
837 if (*inner_mode == *outer_mode
838 && *outer_step != const0_rtx)
839 abort ();
841 return true;
844 /* Determines whether DEF is a biv and if so, stores its description
845 to *IV. */
847 static bool
848 iv_analyze_biv (rtx def, struct rtx_iv *iv)
850 unsigned regno;
851 rtx inner_step, outer_step;
852 enum machine_mode inner_mode, outer_mode;
853 enum rtx_code extend;
855 if (dump_file)
857 fprintf (dump_file, "Analysing ");
858 print_rtl (dump_file, def);
859 fprintf (dump_file, " for bivness.\n");
862 if (!REG_P (def))
864 if (!CONSTANT_P (def))
865 return false;
867 return iv_constant (iv, def, VOIDmode);
870 regno = REGNO (def);
871 if (last_def[regno] == const0_rtx)
873 if (dump_file)
874 fprintf (dump_file, " not simple.\n");
875 return false;
878 if (last_def[regno] && bivs[regno].analysed)
880 if (dump_file)
881 fprintf (dump_file, " already analysed.\n");
883 *iv = bivs[regno];
884 return iv->base != NULL_RTX;
887 if (!last_def[regno])
889 iv_constant (iv, def, VOIDmode);
890 goto end;
893 iv->analysed = true;
894 if (!get_biv_step (def, &inner_step, &inner_mode, &extend,
895 &outer_mode, &outer_step))
897 iv->base = NULL_RTX;
898 goto end;
901 /* Loop transforms base to es (base + inner_step) + outer_step,
902 where es means extend of subreg between inner_mode and outer_mode.
903 The corresponding induction variable is
905 es ((base - outer_step) + i * (inner_step + outer_step)) + outer_step */
907 iv->base = simplify_gen_binary (MINUS, outer_mode, def, outer_step);
908 iv->step = simplify_gen_binary (PLUS, outer_mode, inner_step, outer_step);
909 iv->mode = inner_mode;
910 iv->extend_mode = outer_mode;
911 iv->extend = extend;
912 iv->mult = const1_rtx;
913 iv->delta = outer_step;
914 iv->first_special = inner_mode != outer_mode;
916 end:
917 if (dump_file)
919 fprintf (dump_file, " ");
920 dump_iv_info (dump_file, iv);
921 fprintf (dump_file, "\n");
924 bivs[regno] = *iv;
926 return iv->base != NULL_RTX;
929 /* Analyzes operand OP of INSN and stores the result to *IV. */
931 static bool
932 iv_analyze_op (rtx insn, rtx op, struct rtx_iv *iv)
934 rtx def_insn;
935 unsigned regno;
936 bool inv = CONSTANT_P (op);
938 if (dump_file)
940 fprintf (dump_file, "Analysing operand ");
941 print_rtl (dump_file, op);
942 fprintf (dump_file, " of insn ");
943 print_rtl_single (dump_file, insn);
946 if (GET_CODE (op) == SUBREG)
948 if (!subreg_lowpart_p (op))
949 return false;
951 if (!iv_analyze_op (insn, SUBREG_REG (op), iv))
952 return false;
954 return iv_subreg (iv, GET_MODE (op));
957 if (!inv)
959 regno = REGNO (op);
960 if (!last_def[regno])
961 inv = true;
962 else if (last_def[regno] == const0_rtx)
964 if (dump_file)
965 fprintf (dump_file, " not simple.\n");
966 return false;
970 if (inv)
972 iv_constant (iv, op, VOIDmode);
974 if (dump_file)
976 fprintf (dump_file, " ");
977 dump_iv_info (dump_file, iv);
978 fprintf (dump_file, "\n");
980 return true;
983 def_insn = iv_get_reaching_def (insn, op);
984 if (def_insn == const0_rtx)
986 if (dump_file)
987 fprintf (dump_file, " not simple.\n");
988 return false;
991 return iv_analyze (def_insn, op, iv);
994 /* Analyzes iv DEF defined in INSN and stores the result to *IV. */
996 bool
997 iv_analyze (rtx insn, rtx def, struct rtx_iv *iv)
999 unsigned uid;
1000 rtx set, rhs, mby = NULL_RTX, tmp;
1001 rtx op0 = NULL_RTX, op1 = NULL_RTX;
1002 struct rtx_iv iv0, iv1;
1003 enum machine_mode amode;
1004 enum rtx_code code;
1006 if (insn == const0_rtx)
1007 return false;
1009 if (GET_CODE (def) == SUBREG)
1011 if (!subreg_lowpart_p (def))
1012 return false;
1014 if (!iv_analyze (insn, SUBREG_REG (def), iv))
1015 return false;
1017 return iv_subreg (iv, GET_MODE (def));
1020 if (!insn)
1021 return iv_analyze_biv (def, iv);
1023 if (dump_file)
1025 fprintf (dump_file, "Analysing def of ");
1026 print_rtl (dump_file, def);
1027 fprintf (dump_file, " in insn ");
1028 print_rtl_single (dump_file, insn);
1031 uid = INSN_UID (insn);
1032 if (insn_info[uid].iv.analysed)
1034 if (dump_file)
1035 fprintf (dump_file, " already analysed.\n");
1036 *iv = insn_info[uid].iv;
1037 return iv->base != NULL_RTX;
1040 iv->mode = VOIDmode;
1041 iv->base = NULL_RTX;
1042 iv->step = NULL_RTX;
1044 set = single_set (insn);
1045 rhs = find_reg_equal_equiv_note (insn);
1046 if (rhs)
1047 rhs = XEXP (rhs, 0);
1048 else
1049 rhs = SET_SRC (set);
1050 code = GET_CODE (rhs);
1052 if (CONSTANT_P (rhs))
1054 op0 = rhs;
1055 amode = GET_MODE (def);
1057 else
1059 switch (code)
1061 case SUBREG:
1062 if (!subreg_lowpart_p (rhs))
1063 goto end;
1064 op0 = rhs;
1065 break;
1067 case REG:
1068 op0 = rhs;
1069 break;
1071 case SIGN_EXTEND:
1072 case ZERO_EXTEND:
1073 case NEG:
1074 op0 = XEXP (rhs, 0);
1075 break;
1077 case PLUS:
1078 case MINUS:
1079 op0 = XEXP (rhs, 0);
1080 op1 = XEXP (rhs, 1);
1081 break;
1083 case MULT:
1084 op0 = XEXP (rhs, 0);
1085 mby = XEXP (rhs, 1);
1086 if (!CONSTANT_P (mby))
1088 if (!CONSTANT_P (op0))
1089 abort ();
1090 tmp = op0;
1091 op0 = mby;
1092 mby = tmp;
1094 break;
1096 case ASHIFT:
1097 if (CONSTANT_P (XEXP (rhs, 0)))
1098 abort ();
1099 op0 = XEXP (rhs, 0);
1100 mby = XEXP (rhs, 1);
1101 break;
1103 default:
1104 abort ();
1107 amode = GET_MODE (rhs);
1110 if (op0)
1112 if (!iv_analyze_op (insn, op0, &iv0))
1113 goto end;
1115 if (iv0.mode == VOIDmode)
1117 iv0.mode = amode;
1118 iv0.extend_mode = amode;
1122 if (op1)
1124 if (!iv_analyze_op (insn, op1, &iv1))
1125 goto end;
1127 if (iv1.mode == VOIDmode)
1129 iv1.mode = amode;
1130 iv1.extend_mode = amode;
1134 switch (code)
1136 case SIGN_EXTEND:
1137 case ZERO_EXTEND:
1138 if (!iv_extend (&iv0, code, amode))
1139 goto end;
1140 break;
1142 case NEG:
1143 if (!iv_neg (&iv0))
1144 goto end;
1145 break;
1147 case PLUS:
1148 case MINUS:
1149 if (!iv_add (&iv0, &iv1, code))
1150 goto end;
1151 break;
1153 case MULT:
1154 if (!iv_mult (&iv0, mby))
1155 goto end;
1156 break;
1158 case ASHIFT:
1159 if (!iv_shift (&iv0, mby))
1160 goto end;
1161 break;
1163 default:
1164 break;
1167 *iv = iv0;
1169 end:
1170 iv->analysed = true;
1171 insn_info[uid].iv = *iv;
1173 if (dump_file)
1175 print_rtl (dump_file, def);
1176 fprintf (dump_file, " in insn ");
1177 print_rtl_single (dump_file, insn);
1178 fprintf (dump_file, " is ");
1179 dump_iv_info (dump_file, iv);
1180 fprintf (dump_file, "\n");
1183 return iv->base != NULL_RTX;
 1186 /* Checks whether the definition of register REG in INSN is a basic induction
1187 variable. IV analysis must have been initialized (via a call to
1188 iv_analysis_loop_init) for this function to produce a result. */
1190 bool
1191 biv_p (rtx insn, rtx reg)
1193 struct rtx_iv iv;
1195 if (!REG_P (reg))
1196 return false;
1198 if (last_def[REGNO (reg)] != insn)
1199 return false;
1201 return iv_analyze_biv (reg, &iv);
1204 /* Calculates value of IV at ITERATION-th iteration. */
 1206 rtx
 1207 get_iv_value (struct rtx_iv *iv, rtx iteration)
1209 rtx val;
1211 /* We would need to generate some if_then_else patterns, and so far
1212 it is not needed anywhere. */
1213 if (iv->first_special)
1214 abort ();
1216 if (iv->step != const0_rtx && iteration != const0_rtx)
1217 val = simplify_gen_binary (PLUS, iv->extend_mode, iv->base,
1218 simplify_gen_binary (MULT, iv->extend_mode,
1219 iv->step, iteration));
1220 else
1221 val = iv->base;
1223 if (iv->extend_mode == iv->mode)
1224 return val;
1226 val = lowpart_subreg (iv->mode, val, iv->extend_mode);
1228 if (iv->extend == UNKNOWN)
1229 return val;
1231 val = simplify_gen_unary (iv->extend, iv->extend_mode, val, iv->mode);
1232 val = simplify_gen_binary (PLUS, iv->extend_mode, iv->delta,
1233 simplify_gen_binary (MULT, iv->extend_mode,
1234 iv->mult, val));
1236 return val;
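/* A made-up example of the evaluation above: for an iv with base 0, step 1,
   mode HI, extend SIGN_EXTEND, extend_mode SI, delta 0 and mult 4, the value
   at iteration i is

     4 * sign_extend:SI (subreg:HI (i))

   i.e. the HImode counter is widened back to SImode and then scaled.  */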
1239 /* Free the data for an induction variable analysis. */
1241 void
1242 iv_analysis_done (void)
1244 max_insn_no = 0;
1245 max_reg_no = 0;
1246 if (insn_info)
1248 free (insn_info);
1249 insn_info = NULL;
1251 if (last_def)
1253 free (last_def);
1254 last_def = NULL;
1256 if (bivs)
1258 free (bivs);
1259 bivs = NULL;
1263 /* Computes inverse to X modulo (1 << MOD). */
1265 static unsigned HOST_WIDEST_INT
1266 inverse (unsigned HOST_WIDEST_INT x, int mod)
1268 unsigned HOST_WIDEST_INT mask =
1269 ((unsigned HOST_WIDEST_INT) 1 << (mod - 1) << 1) - 1;
1270 unsigned HOST_WIDEST_INT rslt = 1;
1271 int i;
1273 for (i = 0; i < mod - 1; i++)
1275 rslt = (rslt * x) & mask;
1276 x = (x * x) & mask;
1279 return rslt;
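/* A worked example of the routine above: inverse (3, 4) computes the inverse
   of 3 modulo 2^4 = 16.  The mask is 15 and the loop forms
   3^(2^3 - 1) = 3^7 = 2187 == 11 (mod 16); indeed 3 * 11 = 33 == 1 (mod 16).
   X must be odd; the caller below guarantees this by first dividing all
   factors of two out of the step.  */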
1282 /* Tries to estimate the maximum number of iterations. */
1284 static unsigned HOST_WIDEST_INT
1285 determine_max_iter (struct niter_desc *desc)
1287 rtx niter = desc->niter_expr;
1288 rtx mmin, mmax, left, right;
1289 unsigned HOST_WIDEST_INT nmax, inc;
1291 if (GET_CODE (niter) == AND
1292 && GET_CODE (XEXP (niter, 0)) == CONST_INT)
1294 nmax = INTVAL (XEXP (niter, 0));
1295 if (!(nmax & (nmax + 1)))
1297 desc->niter_max = nmax;
1298 return nmax;
1302 get_mode_bounds (desc->mode, desc->signed_p, desc->mode, &mmin, &mmax);
1303 nmax = INTVAL (mmax) - INTVAL (mmin);
1305 if (GET_CODE (niter) == UDIV)
1307 if (GET_CODE (XEXP (niter, 1)) != CONST_INT)
1309 desc->niter_max = nmax;
1310 return nmax;
1312 inc = INTVAL (XEXP (niter, 1));
1313 niter = XEXP (niter, 0);
1315 else
1316 inc = 1;
1318 if (GET_CODE (niter) == PLUS)
1320 left = XEXP (niter, 0);
 1321 right = XEXP (niter, 1);
1323 if (GET_CODE (right) == CONST_INT)
1324 right = GEN_INT (-INTVAL (right));
1326 else if (GET_CODE (niter) == MINUS)
1328 left = XEXP (niter, 0);
 1329 right = XEXP (niter, 1);
1331 else
1333 left = niter;
1334 right = mmin;
1337 if (GET_CODE (left) == CONST_INT)
1338 mmax = left;
1339 if (GET_CODE (right) == CONST_INT)
1340 mmin = right;
1341 nmax = INTVAL (mmax) - INTVAL (mmin);
1343 desc->niter_max = nmax / inc;
1344 return nmax / inc;
1347 /* Checks whether register *REG is in set ALT. Callback for for_each_rtx. */
1349 static int
1350 altered_reg_used (rtx *reg, void *alt)
1352 if (!REG_P (*reg))
1353 return 0;
1355 return REGNO_REG_SET_P (alt, REGNO (*reg));
1358 /* Marks registers altered by EXPR in set ALT. */
1360 static void
1361 mark_altered (rtx expr, rtx by ATTRIBUTE_UNUSED, void *alt)
1363 if (GET_CODE (expr) == SUBREG)
1364 expr = SUBREG_REG (expr);
1365 if (!REG_P (expr))
1366 return;
1368 SET_REGNO_REG_SET (alt, REGNO (expr));
1371 /* Checks whether RHS is simple enough to process. */
1373 static bool
1374 simple_rhs_p (rtx rhs)
1376 rtx op0, op1;
1378 if (CONSTANT_P (rhs)
1379 || REG_P (rhs))
1380 return true;
1382 switch (GET_CODE (rhs))
1384 case PLUS:
1385 case MINUS:
1386 op0 = XEXP (rhs, 0);
1387 op1 = XEXP (rhs, 1);
1388 /* Allow reg + const sets only. */
1389 if (REG_P (op0) && CONSTANT_P (op1))
1390 return true;
1391 if (REG_P (op1) && CONSTANT_P (op0))
1392 return true;
1394 return false;
1396 default:
1397 return false;
1401 /* Simplifies *EXPR using assignment in INSN. ALTERED is the set of registers
1402 altered so far. */
1404 static void
1405 simplify_using_assignment (rtx insn, rtx *expr, regset altered)
1407 rtx set = single_set (insn);
1408 rtx lhs = NULL_RTX, rhs;
1409 bool ret = false;
1411 if (set)
1413 lhs = SET_DEST (set);
1414 if (!REG_P (lhs)
1415 || altered_reg_used (&lhs, altered))
1416 ret = true;
1418 else
1419 ret = true;
1421 note_stores (PATTERN (insn), mark_altered, altered);
1422 if (CALL_P (insn))
1424 int i;
1426 /* Kill all call clobbered registers. */
1427 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1428 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
1429 SET_REGNO_REG_SET (altered, i);
1432 if (ret)
1433 return;
1435 rhs = find_reg_equal_equiv_note (insn);
1436 if (rhs)
1437 rhs = XEXP (rhs, 0);
1438 else
1439 rhs = SET_SRC (set);
1441 if (!simple_rhs_p (rhs))
1442 return;
1444 if (for_each_rtx (&rhs, altered_reg_used, altered))
1445 return;
1447 *expr = simplify_replace_rtx (*expr, lhs, rhs);
1450 /* Checks whether A implies B. */
1452 static bool
1453 implies_p (rtx a, rtx b)
1455 rtx op0, op1, opb0, opb1, r;
1456 enum machine_mode mode;
1458 if (GET_CODE (a) == EQ)
1460 op0 = XEXP (a, 0);
1461 op1 = XEXP (a, 1);
1463 if (REG_P (op0))
1465 r = simplify_replace_rtx (b, op0, op1);
1466 if (r == const_true_rtx)
1467 return true;
1470 if (REG_P (op1))
1472 r = simplify_replace_rtx (b, op1, op0);
1473 if (r == const_true_rtx)
1474 return true;
1478 /* A < B implies A + 1 <= B. */
1479 if ((GET_CODE (a) == GT || GET_CODE (a) == LT)
1480 && (GET_CODE (b) == GE || GET_CODE (b) == LE))
1482 op0 = XEXP (a, 0);
1483 op1 = XEXP (a, 1);
1484 opb0 = XEXP (b, 0);
1485 opb1 = XEXP (b, 1);
1487 if (GET_CODE (a) == GT)
1489 r = op0;
1490 op0 = op1;
1491 op1 = r;
1494 if (GET_CODE (b) == GE)
1496 r = opb0;
1497 opb0 = opb1;
1498 opb1 = r;
1501 mode = GET_MODE (op0);
1502 if (mode != GET_MODE (opb0))
1503 mode = VOIDmode;
1504 else if (mode == VOIDmode)
1506 mode = GET_MODE (op1);
1507 if (mode != GET_MODE (opb1))
1508 mode = VOIDmode;
1511 if (mode != VOIDmode
1512 && rtx_equal_p (op1, opb1)
1513 && simplify_gen_binary (MINUS, mode, opb0, op0) == const1_rtx)
1514 return true;
1517 return false;
1520 /* Canonicalizes COND so that
 1522 (1) Operands are ordered according to
 1523 swap_commutative_operands_p.
 1524 (2) (LE x const) is replaced with (LT x <const+1>), and similarly
1525 for GE, GEU, and LEU. */
 1527 static rtx
 1528 canon_condition (rtx cond)
1530 rtx tem;
1531 rtx op0, op1;
1532 enum rtx_code code;
1533 enum machine_mode mode;
1535 code = GET_CODE (cond);
1536 op0 = XEXP (cond, 0);
1537 op1 = XEXP (cond, 1);
1539 if (swap_commutative_operands_p (op0, op1))
1541 code = swap_condition (code);
1542 tem = op0;
1543 op0 = op1;
1544 op1 = tem;
1547 mode = GET_MODE (op0);
1548 if (mode == VOIDmode)
1549 mode = GET_MODE (op1);
1550 if (mode == VOIDmode)
1551 abort ();
1553 if (GET_CODE (op1) == CONST_INT
1554 && GET_MODE_CLASS (mode) != MODE_CC
1555 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1557 HOST_WIDE_INT const_val = INTVAL (op1);
1558 unsigned HOST_WIDE_INT uconst_val = const_val;
1559 unsigned HOST_WIDE_INT max_val
1560 = (unsigned HOST_WIDE_INT) GET_MODE_MASK (mode);
1562 switch (code)
1564 case LE:
1565 if ((unsigned HOST_WIDE_INT) const_val != max_val >> 1)
1566 code = LT, op1 = gen_int_mode (const_val + 1, GET_MODE (op0));
1567 break;
1569 /* When cross-compiling, const_val might be sign-extended from
1570 BITS_PER_WORD to HOST_BITS_PER_WIDE_INT */
1571 case GE:
1572 if ((HOST_WIDE_INT) (const_val & max_val)
1573 != (((HOST_WIDE_INT) 1
1574 << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1))))
1575 code = GT, op1 = gen_int_mode (const_val - 1, mode);
1576 break;
1578 case LEU:
1579 if (uconst_val < max_val)
1580 code = LTU, op1 = gen_int_mode (uconst_val + 1, mode);
1581 break;
1583 case GEU:
1584 if (uconst_val != 0)
1585 code = GTU, op1 = gen_int_mode (uconst_val - 1, mode);
1586 break;
1588 default:
1589 break;
1593 if (op0 != XEXP (cond, 0)
1594 || op1 != XEXP (cond, 1)
1595 || code != GET_CODE (cond)
1596 || GET_MODE (cond) != SImode)
1597 cond = gen_rtx_fmt_ee (code, SImode, op0, op1);
1599 return cond;
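/* For example (the pseudo number is made up), canon_condition turns
   (le (reg:SI 100) (const_int 7)) into (lt (reg:SI 100) (const_int 8)), and
   (geu (reg:SI 100) (const_int 1)) into (gtu (reg:SI 100) (const_int 0)).  */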
1602 /* Tries to use the fact that COND holds to simplify EXPR. ALTERED is the
1603 set of altered regs. */
1605 void
1606 simplify_using_condition (rtx cond, rtx *expr, regset altered)
1608 rtx rev, reve, exp = *expr;
1610 if (!COMPARISON_P (exp))
1611 return;
1613 /* If some register gets altered later, we do not really speak about its
1614 value at the time of comparison. */
1615 if (altered
1616 && for_each_rtx (&cond, altered_reg_used, altered))
1617 return;
1619 rev = reversed_condition (cond);
1620 reve = reversed_condition (exp);
1622 cond = canon_condition (cond);
1623 exp = canon_condition (exp);
1624 if (rev)
1625 rev = canon_condition (rev);
1626 if (reve)
1627 reve = canon_condition (reve);
1629 if (rtx_equal_p (exp, cond))
1631 *expr = const_true_rtx;
1632 return;
1636 if (rev && rtx_equal_p (exp, rev))
1638 *expr = const0_rtx;
1639 return;
1642 if (implies_p (cond, exp))
1644 *expr = const_true_rtx;
1645 return;
1648 if (reve && implies_p (cond, reve))
1650 *expr = const0_rtx;
1651 return;
1654 /* A proof by contradiction. If *EXPR implies (not cond), *EXPR must
1655 be false. */
1656 if (rev && implies_p (exp, rev))
1658 *expr = const0_rtx;
1659 return;
 1662 /* Similarly, if (not *EXPR) implies (not cond), *EXPR must be true.  */
1663 if (rev && reve && implies_p (reve, rev))
1665 *expr = const_true_rtx;
1666 return;
1669 /* We would like to have some other tests here. TODO. */
1671 return;
 1674 /* Use the relationship between A and *B to possibly eliminate *B.
1675 OP is the operation we consider. */
1677 static void
1678 eliminate_implied_condition (enum rtx_code op, rtx a, rtx *b)
1680 if (op == AND)
1682 /* If A implies *B, we may replace *B by true. */
1683 if (implies_p (a, *b))
1684 *b = const_true_rtx;
1686 else if (op == IOR)
1688 /* If *B implies A, we may replace *B by false. */
1689 if (implies_p (*b, a))
1690 *b = const0_rtx;
1692 else
1693 abort ();
1696 /* Eliminates the conditions in TAIL that are implied by HEAD. OP is the
1697 operation we consider. */
1699 static void
1700 eliminate_implied_conditions (enum rtx_code op, rtx *head, rtx tail)
1702 rtx elt;
1704 for (elt = tail; elt; elt = XEXP (elt, 1))
1705 eliminate_implied_condition (op, *head, &XEXP (elt, 0));
1706 for (elt = tail; elt; elt = XEXP (elt, 1))
1707 eliminate_implied_condition (op, XEXP (elt, 0), head);
1710 /* Simplifies *EXPR using initial values at the start of the LOOP. If *EXPR
1711 is a list, its elements are assumed to be combined using OP. */
1713 static void
1714 simplify_using_initial_values (struct loop *loop, enum rtx_code op, rtx *expr)
1716 rtx head, tail, insn;
1717 rtx neutral, aggr;
1718 regset altered;
1719 regset_head altered_head;
1720 edge e;
1722 if (!*expr)
1723 return;
1725 if (CONSTANT_P (*expr))
1726 return;
1728 if (GET_CODE (*expr) == EXPR_LIST)
1730 head = XEXP (*expr, 0);
1731 tail = XEXP (*expr, 1);
1733 eliminate_implied_conditions (op, &head, tail);
1735 if (op == AND)
1737 neutral = const_true_rtx;
1738 aggr = const0_rtx;
1740 else if (op == IOR)
1742 neutral = const0_rtx;
1743 aggr = const_true_rtx;
1745 else
1746 abort ();
1748 simplify_using_initial_values (loop, UNKNOWN, &head);
1749 if (head == aggr)
1751 XEXP (*expr, 0) = aggr;
1752 XEXP (*expr, 1) = NULL_RTX;
1753 return;
1755 else if (head == neutral)
1757 *expr = tail;
1758 simplify_using_initial_values (loop, op, expr);
1759 return;
1761 simplify_using_initial_values (loop, op, &tail);
1763 if (tail && XEXP (tail, 0) == aggr)
1765 *expr = tail;
1766 return;
1769 XEXP (*expr, 0) = head;
1770 XEXP (*expr, 1) = tail;
1771 return;
1774 if (op != UNKNOWN)
1775 abort ();
1777 e = loop_preheader_edge (loop);
1778 if (e->src == ENTRY_BLOCK_PTR)
1779 return;
1781 altered = INITIALIZE_REG_SET (altered_head);
1783 while (1)
1785 basic_block tmp_bb;
1787 insn = BB_END (e->src);
1788 if (any_condjump_p (insn))
1790 rtx cond = get_condition (BB_END (e->src), NULL, false, true);
1792 if (cond && (e->flags & EDGE_FALLTHRU))
1793 cond = reversed_condition (cond);
1794 if (cond)
1796 simplify_using_condition (cond, expr, altered);
1797 if (CONSTANT_P (*expr))
1799 FREE_REG_SET (altered);
1800 return;
1805 FOR_BB_INSNS_REVERSE (e->src, insn)
1807 if (!INSN_P (insn))
1808 continue;
1810 simplify_using_assignment (insn, expr, altered);
1811 if (CONSTANT_P (*expr))
1813 FREE_REG_SET (altered);
1814 return;
1818 /* This is a bit subtle. Store away e->src in tmp_bb, since we
1819 modify `e' and this can invalidate the subsequent count of
1820 e->src's predecessors by looking at the wrong block. */
1821 tmp_bb = e->src;
1822 e = EDGE_PRED (tmp_bb, 0);
1823 if (EDGE_COUNT (tmp_bb->preds) > 1
1824 || e->src == ENTRY_BLOCK_PTR)
1825 break;
1828 FREE_REG_SET (altered);
 1831 /* Transforms the invariant IV into MODE.  Adds to DESC assumptions based on
 1832 the fact that IV occurs as the left operand of comparison COND, whose
 1833 signedness is SIGNED_P.  */
1835 static void
1836 shorten_into_mode (struct rtx_iv *iv, enum machine_mode mode,
1837 enum rtx_code cond, bool signed_p, struct niter_desc *desc)
1839 rtx mmin, mmax, cond_over, cond_under;
1841 get_mode_bounds (mode, signed_p, iv->extend_mode, &mmin, &mmax);
1842 cond_under = simplify_gen_relational (LT, SImode, iv->extend_mode,
1843 iv->base, mmin);
1844 cond_over = simplify_gen_relational (GT, SImode, iv->extend_mode,
1845 iv->base, mmax);
1847 switch (cond)
1849 case LE:
1850 case LT:
1851 case LEU:
1852 case LTU:
1853 if (cond_under != const0_rtx)
1854 desc->infinite =
1855 alloc_EXPR_LIST (0, cond_under, desc->infinite);
1856 if (cond_over != const0_rtx)
1857 desc->noloop_assumptions =
1858 alloc_EXPR_LIST (0, cond_over, desc->noloop_assumptions);
1859 break;
1861 case GE:
1862 case GT:
1863 case GEU:
1864 case GTU:
1865 if (cond_over != const0_rtx)
1866 desc->infinite =
1867 alloc_EXPR_LIST (0, cond_over, desc->infinite);
1868 if (cond_under != const0_rtx)
1869 desc->noloop_assumptions =
1870 alloc_EXPR_LIST (0, cond_under, desc->noloop_assumptions);
1871 break;
1873 case NE:
1874 if (cond_over != const0_rtx)
1875 desc->infinite =
1876 alloc_EXPR_LIST (0, cond_over, desc->infinite);
1877 if (cond_under != const0_rtx)
1878 desc->infinite =
1879 alloc_EXPR_LIST (0, cond_under, desc->infinite);
1880 break;
1882 default:
1883 abort ();
1886 iv->mode = mode;
1887 iv->extend = signed_p ? SIGN_EXTEND : ZERO_EXTEND;
1890 /* Transforms IV0 and IV1 compared by COND so that they are both compared as
1891 subregs of the same mode if possible (sometimes it is necessary to add
1892 some assumptions to DESC). */
1894 static bool
1895 canonicalize_iv_subregs (struct rtx_iv *iv0, struct rtx_iv *iv1,
1896 enum rtx_code cond, struct niter_desc *desc)
1898 enum machine_mode comp_mode;
1899 bool signed_p;
1901 /* If the ivs behave specially in the first iteration, or are
1902 added/multiplied after extending, we ignore them. */
1903 if (iv0->first_special || iv0->mult != const1_rtx || iv0->delta != const0_rtx)
1904 return false;
1905 if (iv1->first_special || iv1->mult != const1_rtx || iv1->delta != const0_rtx)
1906 return false;
1908 /* If there is some extend, it must match signedness of the comparison. */
1909 switch (cond)
1911 case LE:
1912 case LT:
1913 if (iv0->extend == ZERO_EXTEND
1914 || iv1->extend == ZERO_EXTEND)
1915 return false;
1916 signed_p = true;
1917 break;
1919 case LEU:
1920 case LTU:
1921 if (iv0->extend == SIGN_EXTEND
1922 || iv1->extend == SIGN_EXTEND)
1923 return false;
1924 signed_p = false;
1925 break;
1927 case NE:
1928 if (iv0->extend != UNKNOWN
1929 && iv1->extend != UNKNOWN
1930 && iv0->extend != iv1->extend)
1931 return false;
1933 signed_p = false;
1934 if (iv0->extend != UNKNOWN)
1935 signed_p = iv0->extend == SIGN_EXTEND;
1936 if (iv1->extend != UNKNOWN)
1937 signed_p = iv1->extend == SIGN_EXTEND;
1938 break;
1940 default:
1941 abort ();
1944 /* Values of both variables should be computed in the same mode. These
1945 might indeed be different, if we have comparison like
1947 (compare (subreg:SI (iv0)) (subreg:SI (iv1)))
1949 and iv0 and iv1 are both ivs iterating in SI mode, but calculated
1950 in different modes. This does not seem impossible to handle, but
1951 it hardly ever occurs in practice.
1953 The only exception is the case when one of operands is invariant.
1954 For example pentium 3 generates comparisons like
1955 (lt (subreg:HI (reg:SI)) 100). Here we assign HImode to 100, but we
 1956 definitely do not want this to prevent the optimization.  */
1957 comp_mode = iv0->extend_mode;
1958 if (GET_MODE_BITSIZE (comp_mode) < GET_MODE_BITSIZE (iv1->extend_mode))
1959 comp_mode = iv1->extend_mode;
1961 if (iv0->extend_mode != comp_mode)
1963 if (iv0->mode != iv0->extend_mode
1964 || iv0->step != const0_rtx)
1965 return false;
1967 iv0->base = simplify_gen_unary (signed_p ? SIGN_EXTEND : ZERO_EXTEND,
1968 comp_mode, iv0->base, iv0->mode);
1969 iv0->extend_mode = comp_mode;
1972 if (iv1->extend_mode != comp_mode)
1974 if (iv1->mode != iv1->extend_mode
1975 || iv1->step != const0_rtx)
1976 return false;
1978 iv1->base = simplify_gen_unary (signed_p ? SIGN_EXTEND : ZERO_EXTEND,
1979 comp_mode, iv1->base, iv1->mode);
1980 iv1->extend_mode = comp_mode;
1983 /* Check that both ivs belong to a range of a single mode. If one of the
1984 operands is an invariant, we may need to shorten it into the common
1985 mode. */
1986 if (iv0->mode == iv0->extend_mode
1987 && iv0->step == const0_rtx
1988 && iv0->mode != iv1->mode)
1989 shorten_into_mode (iv0, iv1->mode, cond, signed_p, desc);
1991 if (iv1->mode == iv1->extend_mode
1992 && iv1->step == const0_rtx
1993 && iv0->mode != iv1->mode)
1994 shorten_into_mode (iv1, iv0->mode, swap_condition (cond), signed_p, desc);
1996 if (iv0->mode != iv1->mode)
1997 return false;
1999 desc->mode = iv0->mode;
2000 desc->signed_p = signed_p;
2002 return true;
2005 /* Computes number of iterations of the CONDITION in INSN in LOOP and stores
2006 the result into DESC. Very similar to determine_number_of_iterations
2007 (basically its rtl version), complicated by things like subregs. */
2009 void
2010 iv_number_of_iterations (struct loop *loop, rtx insn, rtx condition,
2011 struct niter_desc *desc)
2013 rtx op0, op1, delta, step, bound, may_xform, def_insn, tmp, tmp0, tmp1;
2014 struct rtx_iv iv0, iv1, tmp_iv;
2015 rtx assumption, may_not_xform;
2016 enum rtx_code cond;
2017 enum machine_mode mode, comp_mode;
2018 rtx mmin, mmax, mode_mmin, mode_mmax;
2019 unsigned HOST_WIDEST_INT s, size, d, inv;
2020 HOST_WIDEST_INT up, down, inc;
2021 int was_sharp = false;
2022 rtx old_niter;
2024 /* The meaning of these assumptions is this:
2025 if !assumptions
2026 then the rest of information does not have to be valid
2027 if noloop_assumptions then the loop does not roll
2028 if infinite then this exit is never used */
2030 desc->assumptions = NULL_RTX;
2031 desc->noloop_assumptions = NULL_RTX;
2032 desc->infinite = NULL_RTX;
2033 desc->simple_p = true;
2035 desc->const_iter = false;
2036 desc->niter_expr = NULL_RTX;
2037 desc->niter_max = 0;
2039 cond = GET_CODE (condition);
2040 if (!COMPARISON_P (condition))
2041 abort ();
2043 mode = GET_MODE (XEXP (condition, 0));
2044 if (mode == VOIDmode)
2045 mode = GET_MODE (XEXP (condition, 1));
2046 /* The constant comparisons should be folded. */
2047 if (mode == VOIDmode)
2048 abort ();
2050 /* We only handle integers or pointers. */
2051 if (GET_MODE_CLASS (mode) != MODE_INT
2052 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
2053 goto fail;
2055 op0 = XEXP (condition, 0);
2056 def_insn = iv_get_reaching_def (insn, op0);
2057 if (!iv_analyze (def_insn, op0, &iv0))
2058 goto fail;
2059 if (iv0.extend_mode == VOIDmode)
2060 iv0.mode = iv0.extend_mode = mode;
2062 op1 = XEXP (condition, 1);
2063 def_insn = iv_get_reaching_def (insn, op1);
2064 if (!iv_analyze (def_insn, op1, &iv1))
2065 goto fail;
2066 if (iv1.extend_mode == VOIDmode)
2067 iv1.mode = iv1.extend_mode = mode;
2069 if (GET_MODE_BITSIZE (iv0.extend_mode) > HOST_BITS_PER_WIDE_INT
2070 || GET_MODE_BITSIZE (iv1.extend_mode) > HOST_BITS_PER_WIDE_INT)
2071 goto fail;
2073 /* Check condition and normalize it. */
2075 switch (cond)
2077 case GE:
2078 case GT:
2079 case GEU:
2080 case GTU:
2081 tmp_iv = iv0; iv0 = iv1; iv1 = tmp_iv;
2082 cond = swap_condition (cond);
2083 break;
2084 case NE:
2085 case LE:
2086 case LEU:
2087 case LT:
2088 case LTU:
2089 break;
2090 default:
2091 goto fail;
2094 /* Handle extends. This is relatively nontrivial, so we only try in some
2095 easy cases, when we can canonicalize the ivs (possibly by adding some
2096 assumptions) to shape subreg (base + i * step). This function also fills
2097 in desc->mode and desc->signed_p. */
2099 if (!canonicalize_iv_subregs (&iv0, &iv1, cond, desc))
2100 goto fail;
2102 comp_mode = iv0.extend_mode;
2103 mode = iv0.mode;
2104 size = GET_MODE_BITSIZE (mode);
2105 get_mode_bounds (mode, (cond == LE || cond == LT), comp_mode, &mmin, &mmax);
2106 mode_mmin = lowpart_subreg (mode, mmin, comp_mode);
2107 mode_mmax = lowpart_subreg (mode, mmax, comp_mode);
2109 if (GET_CODE (iv0.step) != CONST_INT || GET_CODE (iv1.step) != CONST_INT)
2110 goto fail;
2112 /* We can take care of the case of two induction variables chasing each other
2113 if the test is NE. I have never seen a loop using it, but still it is
2114 cool. */
2115 if (iv0.step != const0_rtx && iv1.step != const0_rtx)
2117 if (cond != NE)
2118 goto fail;
2120 iv0.step = simplify_gen_binary (MINUS, comp_mode, iv0.step, iv1.step);
2121 iv1.step = const0_rtx;
 2124 /* This is either an infinite loop or one that ends immediately, depending
 2125 on the initial values.  Unswitching should remove this kind of condition.  */
2126 if (iv0.step == const0_rtx && iv1.step == const0_rtx)
2127 goto fail;
2129 /* Ignore loops of while (i-- < 10) type. */
2130 if (cond != NE
2131 && (INTVAL (iv0.step) < 0 || INTVAL (iv1.step) > 0))
2132 goto fail;
2134 /* Some more condition normalization. We must record some assumptions
2135 due to overflows. */
2136 switch (cond)
2138 case LT:
2139 case LTU:
2140 /* We want to take care only of non-sharp relationals; this is easy,
 2141 as in the cases where the overflow would make the transformation unsafe
2142 the loop does not roll. Seemingly it would make more sense to want
2143 to take care of sharp relationals instead, as NE is more similar to
2144 them, but the problem is that here the transformation would be more
2145 difficult due to possibly infinite loops. */
2146 if (iv0.step == const0_rtx)
2148 tmp = lowpart_subreg (mode, iv0.base, comp_mode);
2149 assumption = simplify_gen_relational (EQ, SImode, mode, tmp,
2150 mode_mmax);
2151 if (assumption == const_true_rtx)
2152 goto zero_iter;
2153 iv0.base = simplify_gen_binary (PLUS, comp_mode,
2154 iv0.base, const1_rtx);
2156 else
2158 tmp = lowpart_subreg (mode, iv1.base, comp_mode);
2159 assumption = simplify_gen_relational (EQ, SImode, mode, tmp,
2160 mode_mmin);
2161 if (assumption == const_true_rtx)
2162 goto zero_iter;
2163 iv1.base = simplify_gen_binary (PLUS, comp_mode,
2164 iv1.base, constm1_rtx);
2167 if (assumption != const0_rtx)
2168 desc->noloop_assumptions =
2169 alloc_EXPR_LIST (0, assumption, desc->noloop_assumptions);
2170 cond = (cond == LT) ? LE : LEU;
2172 /* It will be useful to be able to tell the difference once more in
2173 LE -> NE reduction. */
2174 was_sharp = true;
2175 break;
2176 default: ;
2179 /* Take care of trivially infinite loops. */
2180 if (cond != NE)
2182 if (iv0.step == const0_rtx)
2184 tmp = lowpart_subreg (mode, iv0.base, comp_mode);
2185 if (rtx_equal_p (tmp, mode_mmin))
2187 desc->infinite =
2188 alloc_EXPR_LIST (0, const_true_rtx, NULL_RTX);
2189 return;
2192 else
2194 tmp = lowpart_subreg (mode, iv1.base, comp_mode);
2195 if (rtx_equal_p (tmp, mode_mmax))
2197 desc->infinite =
2198 alloc_EXPR_LIST (0, const_true_rtx, NULL_RTX);
2199 return;
 2204 /* If we can, we want to take care of NE conditions instead of size
2205 comparisons, as they are much more friendly (most importantly
2206 this takes care of special handling of loops with step 1). We can
2207 do it if we first check that upper bound is greater or equal to
2208 lower bound, their difference is constant c modulo step and that
2209 there is not an overflow. */
2210 if (cond != NE)
2212 if (iv0.step == const0_rtx)
2213 step = simplify_gen_unary (NEG, comp_mode, iv1.step, comp_mode);
2214 else
2215 step = iv0.step;
2216 delta = simplify_gen_binary (MINUS, comp_mode, iv1.base, iv0.base);
2217 delta = lowpart_subreg (mode, delta, comp_mode);
2218 delta = simplify_gen_binary (UMOD, mode, delta, step);
2219 may_xform = const0_rtx;
2220 may_not_xform = const_true_rtx;
2222 if (GET_CODE (delta) == CONST_INT)
2224 if (was_sharp && INTVAL (delta) == INTVAL (step) - 1)
2226 /* A special case. We have transformed condition of type
2227 for (i = 0; i < 4; i += 4)
2228 into
2229 for (i = 0; i <= 3; i += 4)
2230 obviously if the test for overflow during that transformation
2231 passed, we cannot overflow here. Most importantly any
2232 loop with sharp end condition and step 1 falls into this
2233 category, so handling this case specially is definitely
 2234 worth the trouble.  */
2235 may_xform = const_true_rtx;
2237 else if (iv0.step == const0_rtx)
2239 bound = simplify_gen_binary (PLUS, comp_mode, mmin, step);
2240 bound = simplify_gen_binary (MINUS, comp_mode, bound, delta);
2241 bound = lowpart_subreg (mode, bound, comp_mode);
2242 tmp = lowpart_subreg (mode, iv0.base, comp_mode);
2243 may_xform = simplify_gen_relational (cond, SImode, mode,
2244 bound, tmp);
2245 may_not_xform = simplify_gen_relational (reverse_condition (cond),
2246 SImode, mode,
2247 bound, tmp);
2249 else
2251 bound = simplify_gen_binary (MINUS, comp_mode, mmax, step);
2252 bound = simplify_gen_binary (PLUS, comp_mode, bound, delta);
2253 bound = lowpart_subreg (mode, bound, comp_mode);
2254 tmp = lowpart_subreg (mode, iv1.base, comp_mode);
2255 may_xform = simplify_gen_relational (cond, SImode, mode,
2256 tmp, bound);
2257 may_not_xform = simplify_gen_relational (reverse_condition (cond),
2258 SImode, mode,
2259 tmp, bound);
2263 if (may_xform != const0_rtx)
 2265 /* We always perform the transformation, provided that it is not
2266 completely senseless. This is OK, as we would need this assumption
2267 to determine the number of iterations anyway. */
2268 if (may_xform != const_true_rtx)
2270 /* If the step is a power of two and the final value we have
2271 computed overflows, the cycle is infinite. Otherwise it
2272 is nontrivial to compute the number of iterations. */
2273 s = INTVAL (step);
2274 if ((s & (s - 1)) == 0)
2275 desc->infinite = alloc_EXPR_LIST (0, may_not_xform,
2276 desc->infinite);
2277 else
2278 desc->assumptions = alloc_EXPR_LIST (0, may_xform,
2279 desc->assumptions);
2282 /* We are going to lose some information about upper bound on
2283 number of iterations in this step, so record the information
2284 here. */
2285 inc = INTVAL (iv0.step) - INTVAL (iv1.step);
2286 if (GET_CODE (iv1.base) == CONST_INT)
2287 up = INTVAL (iv1.base);
2288 else
2289 up = INTVAL (mode_mmax) - inc;
2290 down = INTVAL (GET_CODE (iv0.base) == CONST_INT
2291 ? iv0.base
2292 : mode_mmin);
2293 desc->niter_max = (up - down) / inc + 1;
2295 if (iv0.step == const0_rtx)
2297 iv0.base = simplify_gen_binary (PLUS, comp_mode, iv0.base, delta);
2298 iv0.base = simplify_gen_binary (MINUS, comp_mode, iv0.base, step);
2300 else
2302 iv1.base = simplify_gen_binary (MINUS, comp_mode, iv1.base, delta);
2303 iv1.base = simplify_gen_binary (PLUS, comp_mode, iv1.base, step);
2306 tmp0 = lowpart_subreg (mode, iv0.base, comp_mode);
2307 tmp1 = lowpart_subreg (mode, iv1.base, comp_mode);
2308 assumption = simplify_gen_relational (reverse_condition (cond),
2309 SImode, mode, tmp0, tmp1);
2310 if (assumption == const_true_rtx)
2311 goto zero_iter;
2312 else if (assumption != const0_rtx)
2313 desc->noloop_assumptions =
2314 alloc_EXPR_LIST (0, assumption, desc->noloop_assumptions);
2315 cond = NE;
2319 /* Count the number of iterations. */
2320 if (cond == NE)
 2322 /* Everything we do here is just arithmetic modulo the size of the mode.  This
 2323 allows us to do more involved computations of the number of iterations
2324 than in other cases. First transform the condition into shape
2325 s * i <> c, with s positive. */
2326 iv1.base = simplify_gen_binary (MINUS, comp_mode, iv1.base, iv0.base);
2327 iv0.base = const0_rtx;
2328 iv0.step = simplify_gen_binary (MINUS, comp_mode, iv0.step, iv1.step);
2329 iv1.step = const0_rtx;
2330 if (INTVAL (iv0.step) < 0)
2332 iv0.step = simplify_gen_unary (NEG, comp_mode, iv0.step, mode);
2333 iv1.base = simplify_gen_unary (NEG, comp_mode, iv1.base, mode);
2335 iv0.step = lowpart_subreg (mode, iv0.step, comp_mode);
 2337 /* Let gcd (s, size of mode) = d.  If d does not divide c, the loop
2338 is infinite. Otherwise, the number of iterations is
2339 (inverse(s/d) * (c/d)) mod (size of mode/d). */
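/* A made-up numeric example of the computation below: in QImode (size 8),
   for the condition 6 * i != 4, d = 2 divides 4, s becomes 3 and size
   becomes 7, so the count is inverse (3, 7) * (4 / 2) masked to 7 bits.
   inverse (3, 7) = 43 (3 * 43 = 129 == 1 mod 128) and 43 * 2 = 86; indeed
   6 * 86 = 516 == 4 (mod 256), so the loop exits after 86 iterations.  */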
2340 s = INTVAL (iv0.step); d = 1;
2341 while (s % 2 != 1)
2343 s /= 2;
2344 d *= 2;
2345 size--;
2347 bound = GEN_INT (((unsigned HOST_WIDEST_INT) 1 << (size - 1 ) << 1) - 1);
2349 tmp1 = lowpart_subreg (mode, iv1.base, comp_mode);
2350 tmp = simplify_gen_binary (UMOD, mode, tmp1, GEN_INT (d));
2351 assumption = simplify_gen_relational (NE, SImode, mode, tmp, const0_rtx);
2352 desc->infinite = alloc_EXPR_LIST (0, assumption, desc->infinite);
2354 tmp = simplify_gen_binary (UDIV, mode, tmp1, GEN_INT (d));
2355 inv = inverse (s, size);
2356 inv = trunc_int_for_mode (inv, mode);
2357 tmp = simplify_gen_binary (MULT, mode, tmp, GEN_INT (inv));
2358 desc->niter_expr = simplify_gen_binary (AND, mode, tmp, bound);
2360 else
2362 if (iv1.step == const0_rtx)
2363 /* Condition in shape a + s * i <= b
2364 We must know that b + s does not overflow and a <= b + s and then we
2365 can compute number of iterations as (b + s - a) / s. (It might
2366 seem that we in fact could be more clever about testing the b + s
2367 overflow condition using some information about b - a mod s,
2368 but it was already taken into account during LE -> NE transform). */
2370 step = iv0.step;
2371 tmp0 = lowpart_subreg (mode, iv0.base, comp_mode);
2372 tmp1 = lowpart_subreg (mode, iv1.base, comp_mode);
2374 bound = simplify_gen_binary (MINUS, mode, mode_mmax,
2375 lowpart_subreg (mode, step, comp_mode));
2376 assumption = simplify_gen_relational (cond, SImode, mode,
2377 tmp1, bound);
2378 desc->assumptions =
2379 alloc_EXPR_LIST (0, assumption, desc->assumptions);
2381 tmp = simplify_gen_binary (PLUS, comp_mode, iv1.base, iv0.step);
2382 tmp = lowpart_subreg (mode, tmp, comp_mode);
2383 assumption = simplify_gen_relational (reverse_condition (cond),
2384 SImode, mode, tmp0, tmp);
2386 delta = simplify_gen_binary (PLUS, mode, tmp1, step);
2387 delta = simplify_gen_binary (MINUS, mode, delta, tmp0);
2389 else
2391 /* Condition in shape a <= b - s * i
2392 We must know that a - s does not overflow and a - s <= b and then
2393 we can again compute number of iterations as (b - (a - s)) / s. */
2394 step = simplify_gen_unary (NEG, mode, iv1.step, mode);
2395 tmp0 = lowpart_subreg (mode, iv0.base, comp_mode);
2396 tmp1 = lowpart_subreg (mode, iv1.base, comp_mode);
2398 bound = simplify_gen_binary (MINUS, mode, mode_mmin,
2399 lowpart_subreg (mode, step, comp_mode));
2400 assumption = simplify_gen_relational (cond, SImode, mode,
2401 bound, tmp0);
2402 desc->assumptions =
2403 alloc_EXPR_LIST (0, assumption, desc->assumptions);
2405 tmp = simplify_gen_binary (PLUS, comp_mode, iv0.base, iv1.step);
2406 tmp = lowpart_subreg (mode, tmp, comp_mode);
2407 assumption = simplify_gen_relational (reverse_condition (cond),
2408 SImode, mode,
2409 tmp, tmp1);
2410 delta = simplify_gen_binary (MINUS, mode, tmp0, step);
2411 delta = simplify_gen_binary (MINUS, mode, tmp1, delta);
2413 if (assumption == const_true_rtx)
2414 goto zero_iter;
2415 else if (assumption != const0_rtx)
2416 desc->noloop_assumptions =
2417 alloc_EXPR_LIST (0, assumption, desc->noloop_assumptions);
2418 delta = simplify_gen_binary (UDIV, mode, delta, step);
2419 desc->niter_expr = delta;
2422 old_niter = desc->niter_expr;
2424 simplify_using_initial_values (loop, AND, &desc->assumptions);
2425 if (desc->assumptions
2426 && XEXP (desc->assumptions, 0) == const0_rtx)
2427 goto fail;
2428 simplify_using_initial_values (loop, IOR, &desc->noloop_assumptions);
2429 simplify_using_initial_values (loop, IOR, &desc->infinite);
2430 simplify_using_initial_values (loop, UNKNOWN, &desc->niter_expr);
 2432 /* Rerun the simplification.  Consider code (created by copying loop headers)
 2434 i = 0;
 2436 if (0 < n)
 2437 {
 2438 do
 2439 {
 2440 i++;
 2441 } while (i < n);
 2442 }
 2444 The first pass determines that i = 0, the second pass uses it to eliminate
 2445 noloop assumption.  */
2447 simplify_using_initial_values (loop, AND, &desc->assumptions);
2448 if (desc->assumptions
2449 && XEXP (desc->assumptions, 0) == const0_rtx)
2450 goto fail;
2451 simplify_using_initial_values (loop, IOR, &desc->noloop_assumptions);
2452 simplify_using_initial_values (loop, IOR, &desc->infinite);
2453 simplify_using_initial_values (loop, UNKNOWN, &desc->niter_expr);
2455 if (desc->noloop_assumptions
2456 && XEXP (desc->noloop_assumptions, 0) == const_true_rtx)
2457 goto zero_iter;
2459 if (GET_CODE (desc->niter_expr) == CONST_INT)
2461 unsigned HOST_WIDEST_INT val = INTVAL (desc->niter_expr);
2463 desc->const_iter = true;
2464 desc->niter_max = desc->niter = val & GET_MODE_MASK (desc->mode);
2466 else
2468 if (!desc->niter_max)
2469 desc->niter_max = determine_max_iter (desc);
2471 /* simplify_using_initial_values does a copy propagation on the registers
2472 in the expression for the number of iterations. This prolongs life
2473 ranges of registers and increases register pressure, and usually
2474 brings no gain (and if it happens to do, the cse pass will take care
2475 of it anyway). So prevent this behavior, unless it enabled us to
2476 derive that the number of iterations is a constant. */
2477 desc->niter_expr = old_niter;
2480 return;
2482 fail:
2483 desc->simple_p = false;
2484 return;
2486 zero_iter:
2487 desc->const_iter = true;
2488 desc->niter = 0;
2489 desc->niter_max = 0;
2490 desc->niter_expr = const0_rtx;
2491 return;
2494 /* Checks whether E is a simple exit from LOOP and stores its description
2495 into DESC. */
2497 static void
2498 check_simple_exit (struct loop *loop, edge e, struct niter_desc *desc)
2500 basic_block exit_bb;
2501 rtx condition, at;
2502 edge ein;
2504 exit_bb = e->src;
2505 desc->simple_p = false;
2507 /* It must belong directly to the loop. */
2508 if (exit_bb->loop_father != loop)
2509 return;
2511 /* It must be tested (at least) once during any iteration. */
2512 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit_bb))
2513 return;
2515 /* It must end in a simple conditional jump. */
2516 if (!any_condjump_p (BB_END (exit_bb)))
2517 return;
2519 ein = EDGE_SUCC (exit_bb, 0);
2520 if (ein == e)
2521 ein = EDGE_SUCC (exit_bb, 1);
2523 desc->out_edge = e;
2524 desc->in_edge = ein;
2526 /* Test whether the condition is suitable. */
2527 if (!(condition = get_condition (BB_END (ein->src), &at, false, false)))
2528 return;
2530 if (ein->flags & EDGE_FALLTHRU)
2532 condition = reversed_condition (condition);
2533 if (!condition)
2534 return;
2537 /* Check that we are able to determine number of iterations and fill
2538 in information about it. */
2539 iv_number_of_iterations (loop, at, condition, desc);
2542 /* Finds a simple exit of LOOP and stores its description into DESC. */
2544 void
2545 find_simple_exit (struct loop *loop, struct niter_desc *desc)
2547 unsigned i;
2548 basic_block *body;
2549 edge e;
2550 struct niter_desc act;
2551 bool any = false;
2552 edge_iterator ei;
2554 desc->simple_p = false;
2555 body = get_loop_body (loop);
2557 for (i = 0; i < loop->num_nodes; i++)
2559 FOR_EACH_EDGE (e, ei, body[i]->succs)
2561 if (flow_bb_inside_loop_p (loop, e->dest))
2562 continue;
2564 check_simple_exit (loop, e, &act);
2565 if (!act.simple_p)
2566 continue;
 2568 /* Prefer constant iterations; the fewer the better.  */
2569 if (!any)
2570 any = true;
2571 else if (!act.const_iter
2572 || (desc->const_iter && act.niter >= desc->niter))
2573 continue;
2574 *desc = act;
2578 if (dump_file)
2580 if (desc->simple_p)
2582 fprintf (dump_file, "Loop %d is simple:\n", loop->num);
2583 fprintf (dump_file, " simple exit %d -> %d\n",
2584 desc->out_edge->src->index,
2585 desc->out_edge->dest->index);
2586 if (desc->assumptions)
2588 fprintf (dump_file, " assumptions: ");
2589 print_rtl (dump_file, desc->assumptions);
2590 fprintf (dump_file, "\n");
2592 if (desc->noloop_assumptions)
2594 fprintf (dump_file, " does not roll if: ");
2595 print_rtl (dump_file, desc->noloop_assumptions);
2596 fprintf (dump_file, "\n");
2598 if (desc->infinite)
2600 fprintf (dump_file, " infinite if: ");
2601 print_rtl (dump_file, desc->infinite);
2602 fprintf (dump_file, "\n");
2605 fprintf (dump_file, " number of iterations: ");
2606 print_rtl (dump_file, desc->niter_expr);
2607 fprintf (dump_file, "\n");
2609 fprintf (dump_file, " upper bound: ");
2610 fprintf (dump_file, HOST_WIDEST_INT_PRINT_DEC, desc->niter_max);
2611 fprintf (dump_file, "\n");
2613 else
2614 fprintf (dump_file, "Loop %d is not simple.\n", loop->num);
2617 free (body);
2620 /* Creates a simple loop description of LOOP if it was not computed
2621 already. */
2623 struct niter_desc *
2624 get_simple_loop_desc (struct loop *loop)
2626 struct niter_desc *desc = simple_loop_desc (loop);
2628 if (desc)
2629 return desc;
2631 desc = xmalloc (sizeof (struct niter_desc));
2632 iv_analysis_loop_init (loop);
2633 find_simple_exit (loop, desc);
2634 loop->aux = desc;
2636 return desc;
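/* For illustration, a hypothetical consumer of the description computed
   above (the function and the threshold are made up; the niter_desc fields
   are the ones filled in by find_simple_exit and iv_number_of_iterations).  */

static bool
example_want_to_unroll_p (struct loop *loop)
{
  struct niter_desc *desc = get_simple_loop_desc (loop);

  /* Only consider loops with a recognized simple exit and no extra
     correctness assumptions in this simplified sketch.  */
  if (!desc->simple_p || desc->assumptions)
    return false;

  /* With const_iter set, desc->niter is the exact iteration count;
     otherwise desc->niter_expr and desc->niter_max provide a symbolic
     count and an upper bound.  */
  return desc->const_iter && desc->niter >= 4;
}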
2639 /* Releases simple loop description for LOOP. */
2641 void
2642 free_simple_loop_desc (struct loop *loop)
2644 struct niter_desc *desc = simple_loop_desc (loop);
2646 if (!desc)
2647 return;
2649 free (desc);
2650 loop->aux = NULL;