/* Rtl-level induction variable analysis.
   Copyright (C) 2004, 2005 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 2, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */
/* This is just a very simplistic analysis of induction variables of the loop.
   The major use is for determining the number of iterations of a loop for
   loop unrolling, doloop optimization and branch prediction.  For this we
   are only interested in bivs and a fairly limited set of givs that are
   needed in the exit condition.  We also only compute the iv information on
   demand.

   The interesting registers are determined.  A register is interesting if

   -- it is set only in the blocks that dominate the latch of the current loop
   -- all its sets are simple -- i.e. in the form we understand

   We also number the insns sequentially in each basic block.  For a use of an
   interesting reg, it is then easy to find a reaching definition (there may be
   only one).

   An induction variable is then analyzed simply by walking the use-def
   chains.

   Usage:

     iv_analysis_loop_init (loop);
     insn = iv_get_reaching_def (where, reg);
     if (iv_analyze (insn, reg, &iv))
       {
         ...
       }
     iv_analysis_done (); */
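/* A note on interpreting the result (an illustration only, restating what the
   code below does rather than adding to the interface): on success,
   iv_analyze fills IV so that in the simple, unextended case the register
   behaves as

     iv.base + iteration * iv.step      (computed in iv.mode)

   while loop invariants come back with iv.step == const0_rtx.  A caller that
   only cares about "real" induction variables might therefore additionally
   test iv.step != const0_rtx after a successful iv_analyze.  */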
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "hard-reg-set.h"
#include "obstack.h"
#include "basic-block.h"
#include "cfgloop.h"
#include "expr.h"
#include "intl.h"
#include "output.h"
#include "toplev.h"
/* The insn information.  */

struct insn_info
{
  /* Id of the insn.  */
  unsigned luid;

  /* The previous definition of the register defined by the single
     set in the insn.  */
  rtx prev_def;

  /* The description of the iv.  */
  struct rtx_iv iv;
};

static struct insn_info *insn_info;

/* The last definition of each register.  */

static rtx *last_def;

/* The bivs.  */

static struct rtx_iv *bivs;

/* Maximal insn number for which there is space in the insn_info array.  */

static unsigned max_insn_no;

/* Maximal register number for which there is space in the bivs and last_def
   arrays.  */

static unsigned max_reg_no;
/* Dumps information about IV to FILE.  */

extern void dump_iv_info (FILE *, struct rtx_iv *);
void
dump_iv_info (FILE *file, struct rtx_iv *iv)
{
  if (!iv->base)
    {
      fprintf (file, "not simple");
      return;
    }

  if (iv->step == const0_rtx
      && !iv->first_special)
    fprintf (file, "invariant ");

  print_rtl (file, iv->base);
  if (iv->step != const0_rtx)
    {
      fprintf (file, " + ");
      print_rtl (file, iv->step);
      fprintf (file, " * iteration");
    }
  fprintf (file, " (in %s)", GET_MODE_NAME (iv->mode));

  if (iv->mode != iv->extend_mode)
    fprintf (file, " %s to %s",
             rtx_name[iv->extend],
             GET_MODE_NAME (iv->extend_mode));

  if (iv->mult != const1_rtx)
    {
      fprintf (file, " * ");
      print_rtl (file, iv->mult);
    }
  if (iv->delta != const0_rtx)
    {
      fprintf (file, " + ");
      print_rtl (file, iv->delta);
    }
  if (iv->first_special)
    fprintf (file, " (first special)");
}
/* Assigns luids to insns in basic block BB.  */

static void
assign_luids (basic_block bb)
{
  unsigned i = 0, uid;
  rtx insn;

  FOR_BB_INSNS (bb, insn)
    {
      uid = INSN_UID (insn);
      insn_info[uid].luid = i++;
      insn_info[uid].prev_def = NULL_RTX;
      insn_info[uid].iv.analysed = false;
    }
}

/* Generates a subreg to get the least significant part of EXPR (in mode
   INNER_MODE) to OUTER_MODE.  */

static rtx
lowpart_subreg (enum machine_mode outer_mode, rtx expr,
                enum machine_mode inner_mode)
{
  return simplify_gen_subreg (outer_mode, expr, inner_mode,
                              subreg_lowpart_offset (outer_mode, inner_mode));
}
/* Checks whether REG is a well-behaved register.  */

static bool
simple_reg_p (rtx reg)
{
  unsigned r;

  if (GET_CODE (reg) == SUBREG)
    {
      if (!subreg_lowpart_p (reg))
        return false;
      reg = SUBREG_REG (reg);
    }

  if (!REG_P (reg))
    return false;

  r = REGNO (reg);
  if (HARD_REGISTER_NUM_P (r))
    return false;

  if (GET_MODE_CLASS (GET_MODE (reg)) != MODE_INT)
    return false;

  if (last_def[r] == const0_rtx)
    return false;

  return true;
}

/* Checks whether assignment LHS = RHS is simple enough for us to process.  */

static bool
simple_set_p (rtx lhs, rtx rhs)
{
  rtx op0, op1;

  if (!REG_P (lhs)
      || !simple_reg_p (lhs))
    return false;

  if (CONSTANT_P (rhs))
    return true;

  switch (GET_CODE (rhs))
    {
    case SUBREG:
    case REG:
      return simple_reg_p (rhs);

    case SIGN_EXTEND:
    case ZERO_EXTEND:
    case NEG:
      return simple_reg_p (XEXP (rhs, 0));

    case PLUS:
    case MINUS:
    case MULT:
    case ASHIFT:
      op0 = XEXP (rhs, 0);
      op1 = XEXP (rhs, 1);

      if (!simple_reg_p (op0)
          && !CONSTANT_P (op0))
        return false;

      if (!simple_reg_p (op1)
          && !CONSTANT_P (op1))
        return false;

      if (GET_CODE (rhs) == MULT
          && !CONSTANT_P (op0)
          && !CONSTANT_P (op1))
        return false;

      if (GET_CODE (rhs) == ASHIFT
          && CONSTANT_P (op0))
        return false;

      return true;

    default:
      return false;
    }
}
/* Mark single SET in INSN.  */

static rtx
mark_single_set (rtx insn, rtx set)
{
  rtx def = SET_DEST (set), src;
  unsigned regno, uid;

  src = find_reg_equal_equiv_note (insn);
  if (src)
    src = XEXP (src, 0);
  else
    src = SET_SRC (set);

  if (!simple_set_p (SET_DEST (set), src))
    return NULL_RTX;

  regno = REGNO (def);
  uid = INSN_UID (insn);

  bivs[regno].analysed = false;
  insn_info[uid].prev_def = last_def[regno];
  last_def[regno] = insn;

  return def;
}

/* Invalidate register REG unless it is equal to EXCEPT.  */

static void
kill_sets (rtx reg, rtx by ATTRIBUTE_UNUSED, void *except)
{
  if (GET_CODE (reg) == SUBREG)
    reg = SUBREG_REG (reg);
  if (!REG_P (reg))
    return;
  if (reg == except)
    return;

  last_def[REGNO (reg)] = const0_rtx;
}

/* Marks sets in basic block BB.  If DOM is true, BB dominates the loop
   latch.  */

static void
mark_sets (basic_block bb, bool dom)
{
  rtx insn, set, def;

  FOR_BB_INSNS (bb, insn)
    {
      if (!INSN_P (insn))
        continue;

      if (dom
          && (set = single_set (insn)))
        def = mark_single_set (insn, set);
      else
        def = NULL_RTX;

      note_stores (PATTERN (insn), kill_sets, def);
    }
}
/* Prepare the data for an induction variable analysis of a LOOP.  */

void
iv_analysis_loop_init (struct loop *loop)
{
  basic_block *body = get_loop_body_in_dom_order (loop);
  unsigned b;

  if ((unsigned) get_max_uid () >= max_insn_no)
    {
      /* Add some reserve for insns and registers produced in optimizations.  */
      max_insn_no = get_max_uid () + 100;
      if (insn_info)
        free (insn_info);
      insn_info = xmalloc (max_insn_no * sizeof (struct insn_info));
    }

  if ((unsigned) max_reg_num () >= max_reg_no)
    {
      max_reg_no = max_reg_num () + 100;
      if (last_def)
        free (last_def);
      last_def = xmalloc (max_reg_no * sizeof (rtx));
      if (bivs)
        free (bivs);
      bivs = xmalloc (max_reg_no * sizeof (struct rtx_iv));
    }

  memset (last_def, 0, max_reg_num () * sizeof (rtx));

  for (b = 0; b < loop->num_nodes; b++)
    {
      assign_luids (body[b]);
      mark_sets (body[b], just_once_each_iteration_p (loop, body[b]));
    }

  free (body);
}
/* Gets definition of REG reaching the INSN.  If REG is not simple, const0_rtx
   is returned.  If INSN is before the first def in the loop, NULL_RTX is
   returned.  */

rtx
iv_get_reaching_def (rtx insn, rtx reg)
{
  unsigned regno, luid, auid;
  rtx ainsn;
  basic_block bb, abb;

  if (GET_CODE (reg) == SUBREG)
    {
      if (!subreg_lowpart_p (reg))
        return const0_rtx;
      reg = SUBREG_REG (reg);
    }
  if (!REG_P (reg))
    return NULL_RTX;

  regno = REGNO (reg);
  if (!last_def[regno]
      || last_def[regno] == const0_rtx)
    return last_def[regno];

  bb = BLOCK_FOR_INSN (insn);
  luid = insn_info[INSN_UID (insn)].luid;

  ainsn = last_def[regno];
  while (1)
    {
      abb = BLOCK_FOR_INSN (ainsn);

      if (dominated_by_p (CDI_DOMINATORS, bb, abb))
        break;

      auid = INSN_UID (ainsn);
      ainsn = insn_info[auid].prev_def;

      if (!ainsn)
        return NULL_RTX;
    }

  while (1)
    {
      abb = BLOCK_FOR_INSN (ainsn);
      if (abb != bb)
        return ainsn;

      auid = INSN_UID (ainsn);
      if (luid > insn_info[auid].luid)
        return ainsn;

      ainsn = insn_info[auid].prev_def;
      if (!ainsn)
        return NULL_RTX;
    }
}
/* Sets IV to invariant CST in MODE.  Always returns true (just for
   consistency with other iv manipulation functions that may fail).  */

static bool
iv_constant (struct rtx_iv *iv, rtx cst, enum machine_mode mode)
{
  if (mode == VOIDmode)
    mode = GET_MODE (cst);

  iv->analysed = true;
  iv->mode = mode;
  iv->base = cst;
  iv->step = const0_rtx;
  iv->first_special = false;
  iv->extend = UNKNOWN;
  iv->extend_mode = iv->mode;
  iv->delta = const0_rtx;
  iv->mult = const1_rtx;

  return true;
}
/* Evaluates application of subreg to MODE on IV.  */

static bool
iv_subreg (struct rtx_iv *iv, enum machine_mode mode)
{
  /* If iv is invariant, just calculate the new value.  */
  if (iv->step == const0_rtx
      && !iv->first_special)
    {
      rtx val = get_iv_value (iv, const0_rtx);
      val = lowpart_subreg (mode, val, iv->extend_mode);

      iv->base = val;
      iv->extend = UNKNOWN;
      iv->mode = iv->extend_mode = mode;
      iv->delta = const0_rtx;
      iv->mult = const1_rtx;
      return true;
    }

  if (iv->extend_mode == mode)
    return true;

  if (GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (iv->mode))
    return false;

  iv->extend = UNKNOWN;
  iv->mode = mode;

  iv->base = simplify_gen_binary (PLUS, iv->extend_mode, iv->delta,
                                  simplify_gen_binary (MULT, iv->extend_mode,
                                                       iv->base, iv->mult));
  iv->step = simplify_gen_binary (MULT, iv->extend_mode, iv->step, iv->mult);
  iv->mult = const1_rtx;
  iv->delta = const0_rtx;
  iv->first_special = false;

  return true;
}
/* Evaluates application of EXTEND to MODE on IV.  */

static bool
iv_extend (struct rtx_iv *iv, enum rtx_code extend, enum machine_mode mode)
{
  /* If iv is invariant, just calculate the new value.  */
  if (iv->step == const0_rtx
      && !iv->first_special)
    {
      rtx val = get_iv_value (iv, const0_rtx);
      val = simplify_gen_unary (extend, mode, val, iv->extend_mode);

      iv->base = val;
      iv->extend = UNKNOWN;
      iv->mode = iv->extend_mode = mode;
      iv->delta = const0_rtx;
      iv->mult = const1_rtx;
      return true;
    }

  if (mode != iv->extend_mode)
    return false;

  if (iv->extend != UNKNOWN
      && iv->extend != extend)
    return false;

  iv->extend = extend;

  return true;
}
/* Evaluates negation of IV.  */

static bool
iv_neg (struct rtx_iv *iv)
{
  if (iv->extend == UNKNOWN)
    {
      iv->base = simplify_gen_unary (NEG, iv->extend_mode,
                                     iv->base, iv->extend_mode);
      iv->step = simplify_gen_unary (NEG, iv->extend_mode,
                                     iv->step, iv->extend_mode);
    }
  else
    {
      iv->delta = simplify_gen_unary (NEG, iv->extend_mode,
                                      iv->delta, iv->extend_mode);
      iv->mult = simplify_gen_unary (NEG, iv->extend_mode,
                                     iv->mult, iv->extend_mode);
    }

  return true;
}
/* Evaluates addition or subtraction (according to OP) of IV1 to IV0.  */

static bool
iv_add (struct rtx_iv *iv0, struct rtx_iv *iv1, enum rtx_code op)
{
  enum machine_mode mode;
  rtx arg;

  /* Extend the constant to extend_mode of the other operand if necessary.  */
  if (iv0->extend == UNKNOWN
      && iv0->mode == iv0->extend_mode
      && iv0->step == const0_rtx
      && GET_MODE_SIZE (iv0->extend_mode) < GET_MODE_SIZE (iv1->extend_mode))
    {
      iv0->extend_mode = iv1->extend_mode;
      iv0->base = simplify_gen_unary (ZERO_EXTEND, iv0->extend_mode,
                                      iv0->base, iv0->mode);
    }
  if (iv1->extend == UNKNOWN
      && iv1->mode == iv1->extend_mode
      && iv1->step == const0_rtx
      && GET_MODE_SIZE (iv1->extend_mode) < GET_MODE_SIZE (iv0->extend_mode))
    {
      iv1->extend_mode = iv0->extend_mode;
      iv1->base = simplify_gen_unary (ZERO_EXTEND, iv1->extend_mode,
                                      iv1->base, iv1->mode);
    }

  mode = iv0->extend_mode;
  if (mode != iv1->extend_mode)
    return false;

  if (iv0->extend == UNKNOWN && iv1->extend == UNKNOWN)
    {
      if (iv0->mode != iv1->mode)
        return false;

      iv0->base = simplify_gen_binary (op, mode, iv0->base, iv1->base);
      iv0->step = simplify_gen_binary (op, mode, iv0->step, iv1->step);

      return true;
    }

  /* Handle addition of constant.  */
  if (iv1->extend == UNKNOWN
      && iv1->mode == mode
      && iv1->step == const0_rtx)
    {
      iv0->delta = simplify_gen_binary (op, mode, iv0->delta, iv1->base);
      return true;
    }

  if (iv0->extend == UNKNOWN
      && iv0->mode == mode
      && iv0->step == const0_rtx)
    {
      arg = iv0->base;
      *iv0 = *iv1;
      if (op == MINUS
          && !iv_neg (iv0))
        return false;

      iv0->delta = simplify_gen_binary (PLUS, mode, iv0->delta, arg);
      return true;
    }

  return false;
}
/* Evaluates multiplication of IV by constant MBY.  */

static bool
iv_mult (struct rtx_iv *iv, rtx mby)
{
  enum machine_mode mode = iv->extend_mode;

  if (GET_MODE (mby) != VOIDmode
      && GET_MODE (mby) != mode)
    return false;

  if (iv->extend == UNKNOWN)
    {
      iv->base = simplify_gen_binary (MULT, mode, iv->base, mby);
      iv->step = simplify_gen_binary (MULT, mode, iv->step, mby);
    }
  else
    {
      iv->delta = simplify_gen_binary (MULT, mode, iv->delta, mby);
      iv->mult = simplify_gen_binary (MULT, mode, iv->mult, mby);
    }

  return true;
}

/* Evaluates shift of IV by constant MBY.  */

static bool
iv_shift (struct rtx_iv *iv, rtx mby)
{
  enum machine_mode mode = iv->extend_mode;

  if (GET_MODE (mby) != VOIDmode
      && GET_MODE (mby) != mode)
    return false;

  if (iv->extend == UNKNOWN)
    {
      iv->base = simplify_gen_binary (ASHIFT, mode, iv->base, mby);
      iv->step = simplify_gen_binary (ASHIFT, mode, iv->step, mby);
    }
  else
    {
      iv->delta = simplify_gen_binary (ASHIFT, mode, iv->delta, mby);
      iv->mult = simplify_gen_binary (ASHIFT, mode, iv->mult, mby);
    }

  return true;
}
/* The recursive part of get_biv_step.  Gets the value of the single value
   defined in INSN with respect to the initial value of REG inside the loop,
   in the shape described at get_biv_step.  */

static bool
get_biv_step_1 (rtx insn, rtx reg,
                rtx *inner_step, enum machine_mode *inner_mode,
                enum rtx_code *extend, enum machine_mode outer_mode,
                rtx *outer_step)
{
  rtx set, rhs, op0 = NULL_RTX, op1 = NULL_RTX;
  rtx next, nextr, def_insn, tmp;
  enum rtx_code code;

  set = single_set (insn);
  rhs = find_reg_equal_equiv_note (insn);
  if (rhs)
    rhs = XEXP (rhs, 0);
  else
    rhs = SET_SRC (set);

  code = GET_CODE (rhs);
  switch (code)
    {
    case SUBREG:
    case REG:
      next = rhs;
      break;

    case PLUS:
    case MINUS:
      op0 = XEXP (rhs, 0);
      op1 = XEXP (rhs, 1);

      if (code == PLUS && CONSTANT_P (op0))
        {
          tmp = op0; op0 = op1; op1 = tmp;
        }

      if (!simple_reg_p (op0)
          || !CONSTANT_P (op1))
        return false;

      if (GET_MODE (rhs) != outer_mode)
        {
          /* ppc64 uses expressions like

             (set x:SI (plus:SI (subreg:SI y:DI) 1)).

             this is equivalent to

             (set x':DI (plus:DI y:DI 1))
             (set x:SI (subreg:SI (x':DI)).  */
          if (GET_CODE (op0) != SUBREG)
            return false;
          if (GET_MODE (SUBREG_REG (op0)) != outer_mode)
            return false;
        }

      next = op0;
      break;

    case SIGN_EXTEND:
    case ZERO_EXTEND:
      if (GET_MODE (rhs) != outer_mode)
        return false;

      op0 = XEXP (rhs, 0);
      if (!simple_reg_p (op0))
        return false;

      next = op0;
      break;

    default:
      return false;
    }

  if (GET_CODE (next) == SUBREG)
    {
      if (!subreg_lowpart_p (next))
        return false;

      nextr = SUBREG_REG (next);
      if (GET_MODE (nextr) != outer_mode)
        return false;
    }
  else
    nextr = next;

  def_insn = iv_get_reaching_def (insn, nextr);
  if (def_insn == const0_rtx)
    return false;

  if (!def_insn)
    {
      if (!rtx_equal_p (nextr, reg))
        return false;

      *inner_step = const0_rtx;
      *extend = UNKNOWN;
      *inner_mode = outer_mode;
      *outer_step = const0_rtx;
    }
  else if (!get_biv_step_1 (def_insn, reg,
                            inner_step, inner_mode, extend, outer_mode,
                            outer_step))
    return false;

  if (GET_CODE (next) == SUBREG)
    {
      enum machine_mode amode = GET_MODE (next);

      if (GET_MODE_SIZE (amode) > GET_MODE_SIZE (*inner_mode))
        return false;

      *inner_mode = amode;
      *inner_step = simplify_gen_binary (PLUS, outer_mode,
                                         *inner_step, *outer_step);
      *outer_step = const0_rtx;
      *extend = UNKNOWN;
    }

  switch (code)
    {
    case REG:
    case SUBREG:
      break;

    case PLUS:
    case MINUS:
      if (*inner_mode == outer_mode
          /* See comment in previous switch.  */
          || GET_MODE (rhs) != outer_mode)
        *inner_step = simplify_gen_binary (code, outer_mode,
                                           *inner_step, op1);
      else
        *outer_step = simplify_gen_binary (code, outer_mode,
                                           *outer_step, op1);
      break;

    case SIGN_EXTEND:
    case ZERO_EXTEND:
      gcc_assert (GET_MODE (op0) == *inner_mode
                  && *extend == UNKNOWN
                  && *outer_step == const0_rtx);

      *extend = code;
      break;

    default:
      gcc_unreachable ();
    }

  return true;
}
/* Gets the operation on register REG inside loop, in shape

   OUTER_STEP + EXTEND_{OUTER_MODE} (SUBREG_{INNER_MODE} (REG + INNER_STEP))

   If the operation cannot be described in this shape, return false.  */
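/* For example (an illustration, not part of the original comment), a plain
   counter updated by (set i:SI (plus:SI i:SI (const_int 1))) comes out as
   *INNER_STEP = const1_rtx, *INNER_MODE = *OUTER_MODE = SImode,
   *EXTEND = UNKNOWN and *OUTER_STEP = const0_rtx.  */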
static bool
get_biv_step (rtx reg, rtx *inner_step, enum machine_mode *inner_mode,
              enum rtx_code *extend, enum machine_mode *outer_mode,
              rtx *outer_step)
{
  *outer_mode = GET_MODE (reg);

  if (!get_biv_step_1 (last_def[REGNO (reg)], reg,
                       inner_step, inner_mode, extend, *outer_mode,
                       outer_step))
    return false;

  gcc_assert ((*inner_mode == *outer_mode) != (*extend != UNKNOWN));
  gcc_assert (*inner_mode != *outer_mode || *outer_step == const0_rtx);

  return true;
}
/* Determines whether DEF is a biv and if so, stores its description
   to *IV.  */

static bool
iv_analyze_biv (rtx def, struct rtx_iv *iv)
{
  unsigned regno;
  rtx inner_step, outer_step;
  enum machine_mode inner_mode, outer_mode;
  enum rtx_code extend;

  if (dump_file)
    {
      fprintf (dump_file, "Analyzing ");
      print_rtl (dump_file, def);
      fprintf (dump_file, " for bivness.\n");
    }

  if (!REG_P (def))
    {
      if (!CONSTANT_P (def))
        return false;

      return iv_constant (iv, def, VOIDmode);
    }

  regno = REGNO (def);
  if (last_def[regno] == const0_rtx)
    {
      if (dump_file)
        fprintf (dump_file, " not simple.\n");
      return false;
    }

  if (last_def[regno] && bivs[regno].analysed)
    {
      if (dump_file)
        fprintf (dump_file, " already analysed.\n");

      *iv = bivs[regno];
      return iv->base != NULL_RTX;
    }

  if (!last_def[regno])
    {
      iv_constant (iv, def, VOIDmode);
      goto end;
    }

  iv->analysed = true;
  if (!get_biv_step (def, &inner_step, &inner_mode, &extend,
                     &outer_mode, &outer_step))
    {
      iv->base = NULL_RTX;
      goto end;
    }

  /* Loop transforms base to es (base + inner_step) + outer_step,
     where es means extend of subreg between inner_mode and outer_mode.
     The corresponding induction variable is

     es ((base - outer_step) + i * (inner_step + outer_step)) + outer_step  */

  iv->base = simplify_gen_binary (MINUS, outer_mode, def, outer_step);
  iv->step = simplify_gen_binary (PLUS, outer_mode, inner_step, outer_step);
  iv->mode = inner_mode;
  iv->extend_mode = outer_mode;
  iv->extend = extend;
  iv->mult = const1_rtx;
  iv->delta = outer_step;
  iv->first_special = inner_mode != outer_mode;

 end:
  if (dump_file)
    {
      fprintf (dump_file, " ");
      dump_iv_info (dump_file, iv);
      fprintf (dump_file, "\n");
    }

  bivs[regno] = *iv;

  return iv->base != NULL_RTX;
}
/* Analyzes operand OP of INSN and stores the result to *IV.  */

static bool
iv_analyze_op (rtx insn, rtx op, struct rtx_iv *iv)
{
  rtx def_insn;
  unsigned regno;
  bool inv = CONSTANT_P (op);

  if (dump_file)
    {
      fprintf (dump_file, "Analyzing operand ");
      print_rtl (dump_file, op);
      fprintf (dump_file, " of insn ");
      print_rtl_single (dump_file, insn);
    }

  if (GET_CODE (op) == SUBREG)
    {
      if (!subreg_lowpart_p (op))
        return false;

      if (!iv_analyze_op (insn, SUBREG_REG (op), iv))
        return false;

      return iv_subreg (iv, GET_MODE (op));
    }

  if (!inv)
    {
      regno = REGNO (op);
      if (!last_def[regno])
        inv = true;
      else if (last_def[regno] == const0_rtx)
        {
          if (dump_file)
            fprintf (dump_file, " not simple.\n");
          return false;
        }
    }

  if (inv)
    {
      iv_constant (iv, op, VOIDmode);

      if (dump_file)
        {
          fprintf (dump_file, " ");
          dump_iv_info (dump_file, iv);
          fprintf (dump_file, "\n");
        }
      return true;
    }

  def_insn = iv_get_reaching_def (insn, op);
  if (def_insn == const0_rtx)
    {
      if (dump_file)
        fprintf (dump_file, " not simple.\n");
      return false;
    }

  return iv_analyze (def_insn, op, iv);
}
/* Analyzes iv DEF defined in INSN and stores the result to *IV.  */

bool
iv_analyze (rtx insn, rtx def, struct rtx_iv *iv)
{
  unsigned uid;
  rtx set, rhs, mby = NULL_RTX, tmp;
  rtx op0 = NULL_RTX, op1 = NULL_RTX;
  struct rtx_iv iv0, iv1;
  enum machine_mode amode;
  enum rtx_code code;

  if (insn == const0_rtx)
    return false;

  if (GET_CODE (def) == SUBREG)
    {
      if (!subreg_lowpart_p (def))
        return false;

      if (!iv_analyze (insn, SUBREG_REG (def), iv))
        return false;

      return iv_subreg (iv, GET_MODE (def));
    }

  if (!insn)
    return iv_analyze_biv (def, iv);

  if (dump_file)
    {
      fprintf (dump_file, "Analyzing def of ");
      print_rtl (dump_file, def);
      fprintf (dump_file, " in insn ");
      print_rtl_single (dump_file, insn);
    }

  uid = INSN_UID (insn);
  if (insn_info[uid].iv.analysed)
    {
      if (dump_file)
        fprintf (dump_file, " already analysed.\n");
      *iv = insn_info[uid].iv;
      return iv->base != NULL_RTX;
    }

  iv->mode = VOIDmode;
  iv->base = NULL_RTX;
  iv->step = NULL_RTX;

  set = single_set (insn);
  rhs = find_reg_equal_equiv_note (insn);
  if (rhs)
    rhs = XEXP (rhs, 0);
  else
    rhs = SET_SRC (set);
  code = GET_CODE (rhs);

  if (CONSTANT_P (rhs))
    {
      op0 = rhs;
      amode = GET_MODE (def);
    }
  else
    {
      switch (code)
        {
        case SUBREG:
          if (!subreg_lowpart_p (rhs))
            goto end;
          op0 = rhs;
          break;

        case REG:
          op0 = rhs;
          break;

        case SIGN_EXTEND:
        case ZERO_EXTEND:
        case NEG:
          op0 = XEXP (rhs, 0);
          break;

        case PLUS:
        case MINUS:
          op0 = XEXP (rhs, 0);
          op1 = XEXP (rhs, 1);
          break;

        case MULT:
          op0 = XEXP (rhs, 0);
          mby = XEXP (rhs, 1);
          if (!CONSTANT_P (mby))
            {
              gcc_assert (CONSTANT_P (op0));
              tmp = op0;
              op0 = mby;
              mby = tmp;
            }
          break;

        case ASHIFT:
          gcc_assert (!CONSTANT_P (XEXP (rhs, 0)));
          op0 = XEXP (rhs, 0);
          mby = XEXP (rhs, 1);
          break;

        default:
          gcc_unreachable ();
        }

      amode = GET_MODE (rhs);
    }

  if (op0)
    {
      if (!iv_analyze_op (insn, op0, &iv0))
        goto end;

      if (iv0.mode == VOIDmode)
        {
          iv0.mode = amode;
          iv0.extend_mode = amode;
        }
    }

  if (op1)
    {
      if (!iv_analyze_op (insn, op1, &iv1))
        goto end;

      if (iv1.mode == VOIDmode)
        {
          iv1.mode = amode;
          iv1.extend_mode = amode;
        }
    }

  switch (code)
    {
    case SIGN_EXTEND:
    case ZERO_EXTEND:
      if (!iv_extend (&iv0, code, amode))
        goto end;
      break;

    case NEG:
      if (!iv_neg (&iv0))
        goto end;
      break;

    case PLUS:
    case MINUS:
      if (!iv_add (&iv0, &iv1, code))
        goto end;
      break;

    case MULT:
      if (!iv_mult (&iv0, mby))
        goto end;
      break;

    case ASHIFT:
      if (!iv_shift (&iv0, mby))
        goto end;
      break;

    default:
      break;
    }

  *iv = iv0;

 end:
  iv->analysed = true;
  insn_info[uid].iv = *iv;

  if (dump_file)
    {
      print_rtl (dump_file, def);
      fprintf (dump_file, " in insn ");
      print_rtl_single (dump_file, insn);
      fprintf (dump_file, " is ");
      dump_iv_info (dump_file, iv);
      fprintf (dump_file, "\n");
    }

  return iv->base != NULL_RTX;
}
/* Checks whether the definition of register REG in INSN is a basic induction
   variable.  IV analysis must have been initialized (via a call to
   iv_analysis_loop_init) for this function to produce a result.  */

bool
biv_p (rtx insn, rtx reg)
{
  struct rtx_iv iv;

  if (!REG_P (reg))
    return false;

  if (last_def[REGNO (reg)] != insn)
    return false;

  return iv_analyze_biv (reg, &iv);
}
/* Calculates value of IV at ITERATION-th iteration.  */
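/* In other words (this restates the code below rather than adding to it),
   the value computed is

     delta + mult * extend_{extend_mode} (subreg_{mode} (base + iteration * step))

   with the extension and the outer affine part skipped when they are
   trivial.  */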
rtx
get_iv_value (struct rtx_iv *iv, rtx iteration)
{
  rtx val;

  /* We would need to generate some if_then_else patterns, and so far
     it is not needed anywhere.  */
  gcc_assert (!iv->first_special);

  if (iv->step != const0_rtx && iteration != const0_rtx)
    val = simplify_gen_binary (PLUS, iv->extend_mode, iv->base,
                               simplify_gen_binary (MULT, iv->extend_mode,
                                                    iv->step, iteration));
  else
    val = iv->base;

  if (iv->extend_mode == iv->mode)
    return val;

  val = lowpart_subreg (iv->mode, val, iv->extend_mode);

  if (iv->extend == UNKNOWN)
    return val;

  val = simplify_gen_unary (iv->extend, iv->extend_mode, val, iv->mode);
  val = simplify_gen_binary (PLUS, iv->extend_mode, iv->delta,
                             simplify_gen_binary (MULT, iv->extend_mode,
                                                  iv->mult, val));

  return val;
}
/* Free the data for an induction variable analysis.  */

void
iv_analysis_done (void)
{
  max_insn_no = 0;
  max_reg_no = 0;
  if (insn_info)
    {
      free (insn_info);
      insn_info = NULL;
    }
  if (last_def)
    {
      free (last_def);
      last_def = NULL;
    }
  if (bivs)
    {
      free (bivs);
      bivs = NULL;
    }
}
/* Computes inverse to X modulo (1 << MOD).  */

static unsigned HOST_WIDEST_INT
inverse (unsigned HOST_WIDEST_INT x, int mod)
{
  unsigned HOST_WIDEST_INT mask =
          ((unsigned HOST_WIDEST_INT) 1 << (mod - 1) << 1) - 1;
  unsigned HOST_WIDEST_INT rslt = 1;
  int i;

  for (i = 0; i < mod - 1; i++)
    {
      rslt = (rslt * x) & mask;
      x = (x * x) & mask;
    }

  return rslt;
}
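/* A note on the method above (an added explanation, not from the original
   sources): for odd X the loop computes X ** (2**(MOD-1) - 1) modulo 2**MOD
   by repeated squaring, which equals the modular inverse because, once MOD is
   at least 3, the multiplicative order of any odd number modulo 2**MOD
   divides 2**(MOD-2) (the small cases check out directly).  For example,
   inverse (3, 4) yields 11, and indeed 3 * 11 = 33 == 1 (mod 16).  X is
   expected to be odd; the only caller passes the step with all factors of
   two divided out.  */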
/* Tries to estimate the maximum number of iterations.  */

static unsigned HOST_WIDEST_INT
determine_max_iter (struct niter_desc *desc)
{
  rtx niter = desc->niter_expr;
  rtx mmin, mmax, left, right;
  unsigned HOST_WIDEST_INT nmax, inc;

  if (GET_CODE (niter) == AND
      && GET_CODE (XEXP (niter, 0)) == CONST_INT)
    {
      nmax = INTVAL (XEXP (niter, 0));
      if (!(nmax & (nmax + 1)))
        {
          desc->niter_max = nmax;
          return nmax;
        }
    }

  get_mode_bounds (desc->mode, desc->signed_p, desc->mode, &mmin, &mmax);
  nmax = INTVAL (mmax) - INTVAL (mmin);

  if (GET_CODE (niter) == UDIV)
    {
      if (GET_CODE (XEXP (niter, 1)) != CONST_INT)
        {
          desc->niter_max = nmax;
          return nmax;
        }
      inc = INTVAL (XEXP (niter, 1));
      niter = XEXP (niter, 0);
    }
  else
    inc = 1;

  if (GET_CODE (niter) == PLUS)
    {
      left = XEXP (niter, 0);
      right = XEXP (niter, 1);

      if (GET_CODE (right) == CONST_INT)
        right = GEN_INT (-INTVAL (right));
    }
  else if (GET_CODE (niter) == MINUS)
    {
      left = XEXP (niter, 0);
      right = XEXP (niter, 1);
    }
  else
    {
      left = niter;
      right = mmin;
    }

  if (GET_CODE (left) == CONST_INT)
    mmax = left;
  if (GET_CODE (right) == CONST_INT)
    mmin = right;
  nmax = INTVAL (mmax) - INTVAL (mmin);

  desc->niter_max = nmax / inc;
  return nmax / inc;
}
/* Checks whether register *REG is in set ALT.  Callback for for_each_rtx.  */

static int
altered_reg_used (rtx *reg, void *alt)
{
  if (!REG_P (*reg))
    return 0;

  return REGNO_REG_SET_P (alt, REGNO (*reg));
}

/* Marks registers altered by EXPR in set ALT.  */

static void
mark_altered (rtx expr, rtx by ATTRIBUTE_UNUSED, void *alt)
{
  if (GET_CODE (expr) == SUBREG)
    expr = SUBREG_REG (expr);
  if (!REG_P (expr))
    return;

  SET_REGNO_REG_SET (alt, REGNO (expr));
}
/* Checks whether RHS is simple enough to process.  */

static bool
simple_rhs_p (rtx rhs)
{
  rtx op0, op1;

  if (CONSTANT_P (rhs)
      || REG_P (rhs))
    return true;

  switch (GET_CODE (rhs))
    {
    case PLUS:
    case MINUS:
      op0 = XEXP (rhs, 0);
      op1 = XEXP (rhs, 1);
      /* Allow reg + const sets only.  */
      if (REG_P (op0) && CONSTANT_P (op1))
        return true;
      if (REG_P (op1) && CONSTANT_P (op0))
        return true;

      return false;

    default:
      return false;
    }
}
/* Simplifies *EXPR using assignment in INSN.  ALTERED is the set of registers
   altered so far.  */

static void
simplify_using_assignment (rtx insn, rtx *expr, regset altered)
{
  rtx set = single_set (insn);
  rtx lhs = NULL_RTX, rhs;
  bool ret = false;

  if (set)
    {
      lhs = SET_DEST (set);
      if (!REG_P (lhs)
          || altered_reg_used (&lhs, altered))
        ret = true;
    }
  else
    ret = true;

  note_stores (PATTERN (insn), mark_altered, altered);
  if (CALL_P (insn))
    {
      int i;

      /* Kill all call clobbered registers.  */
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
        if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
          SET_REGNO_REG_SET (altered, i);
    }

  if (ret)
    return;

  rhs = find_reg_equal_equiv_note (insn);
  if (rhs)
    rhs = XEXP (rhs, 0);
  else
    rhs = SET_SRC (set);

  if (!simple_rhs_p (rhs))
    return;

  if (for_each_rtx (&rhs, altered_reg_used, altered))
    return;

  *expr = simplify_replace_rtx (*expr, lhs, rhs);
}
/* Checks whether A implies B.  */

static bool
implies_p (rtx a, rtx b)
{
  rtx op0, op1, opb0, opb1, r;
  enum machine_mode mode;

  if (GET_CODE (a) == EQ)
    {
      op0 = XEXP (a, 0);
      op1 = XEXP (a, 1);

      if (REG_P (op0))
        {
          r = simplify_replace_rtx (b, op0, op1);
          if (r == const_true_rtx)
            return true;
        }

      if (REG_P (op1))
        {
          r = simplify_replace_rtx (b, op1, op0);
          if (r == const_true_rtx)
            return true;
        }
    }

  /* A < B implies A + 1 <= B.  */
  if ((GET_CODE (a) == GT || GET_CODE (a) == LT)
      && (GET_CODE (b) == GE || GET_CODE (b) == LE))
    {
      op0 = XEXP (a, 0);
      op1 = XEXP (a, 1);
      opb0 = XEXP (b, 0);
      opb1 = XEXP (b, 1);

      if (GET_CODE (a) == GT)
        {
          r = op0;
          op0 = op1;
          op1 = r;
        }

      if (GET_CODE (b) == GE)
        {
          r = opb0;
          opb0 = opb1;
          opb1 = r;
        }

      mode = GET_MODE (op0);
      if (mode != GET_MODE (opb0))
        mode = VOIDmode;
      else if (mode == VOIDmode)
        {
          mode = GET_MODE (op1);
          if (mode != GET_MODE (opb1))
            mode = VOIDmode;
        }

      if (mode != VOIDmode
          && rtx_equal_p (op1, opb1)
          && simplify_gen_binary (MINUS, mode, opb0, op0) == const1_rtx)
        return true;
    }

  return false;
}
/* Canonicalizes COND so that

   (1) Operands are ordered according to swap_commutative_operands_p.
   (2) (LE x const) will be replaced with (LT x <const+1>) and similarly
       for GE, GEU, and LEU.  */
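/* For instance (an illustration, not part of the original comment),
   (leu x (const_int 3)) becomes (ltu x (const_int 4)) and
   (ge x (const_int 7)) becomes (gt x (const_int 6)).  */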
static rtx
canon_condition (rtx cond)
{
  rtx tem;
  rtx op0, op1;
  enum rtx_code code;
  enum machine_mode mode;

  code = GET_CODE (cond);
  op0 = XEXP (cond, 0);
  op1 = XEXP (cond, 1);

  if (swap_commutative_operands_p (op0, op1))
    {
      code = swap_condition (code);
      tem = op0;
      op0 = op1;
      op1 = tem;
    }

  mode = GET_MODE (op0);
  if (mode == VOIDmode)
    mode = GET_MODE (op1);
  gcc_assert (mode != VOIDmode);

  if (GET_CODE (op1) == CONST_INT
      && GET_MODE_CLASS (mode) != MODE_CC
      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
    {
      HOST_WIDE_INT const_val = INTVAL (op1);
      unsigned HOST_WIDE_INT uconst_val = const_val;
      unsigned HOST_WIDE_INT max_val
        = (unsigned HOST_WIDE_INT) GET_MODE_MASK (mode);

      switch (code)
        {
        case LE:
          if ((unsigned HOST_WIDE_INT) const_val != max_val >> 1)
            code = LT, op1 = gen_int_mode (const_val + 1, GET_MODE (op0));
          break;

        /* When cross-compiling, const_val might be sign-extended from
           BITS_PER_WORD to HOST_BITS_PER_WIDE_INT */
        case GE:
          if ((HOST_WIDE_INT) (const_val & max_val)
              != (((HOST_WIDE_INT) 1
                   << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1))))
            code = GT, op1 = gen_int_mode (const_val - 1, mode);
          break;

        case LEU:
          if (uconst_val < max_val)
            code = LTU, op1 = gen_int_mode (uconst_val + 1, mode);
          break;

        case GEU:
          if (uconst_val != 0)
            code = GTU, op1 = gen_int_mode (uconst_val - 1, mode);
          break;

        default:
          break;
        }
    }

  if (op0 != XEXP (cond, 0)
      || op1 != XEXP (cond, 1)
      || code != GET_CODE (cond)
      || GET_MODE (cond) != SImode)
    cond = gen_rtx_fmt_ee (code, SImode, op0, op1);

  return cond;
}
/* Tries to use the fact that COND holds to simplify EXPR.  ALTERED is the
   set of altered regs.  */

void
simplify_using_condition (rtx cond, rtx *expr, regset altered)
{
  rtx rev, reve, exp = *expr;

  if (!COMPARISON_P (exp))
    return;

  /* If some register gets altered later, we do not really speak about its
     value at the time of comparison.  */
  if (altered
      && for_each_rtx (&cond, altered_reg_used, altered))
    return;

  rev = reversed_condition (cond);
  reve = reversed_condition (exp);

  cond = canon_condition (cond);
  exp = canon_condition (exp);
  if (rev)
    rev = canon_condition (rev);
  if (reve)
    reve = canon_condition (reve);

  if (rtx_equal_p (exp, cond))
    {
      *expr = const_true_rtx;
      return;
    }

  if (rev && rtx_equal_p (exp, rev))
    {
      *expr = const0_rtx;
      return;
    }

  if (implies_p (cond, exp))
    {
      *expr = const_true_rtx;
      return;
    }

  if (reve && implies_p (cond, reve))
    {
      *expr = const0_rtx;
      return;
    }

  /* A proof by contradiction.  If *EXPR implies (not cond), *EXPR must
     be false.  */
  if (rev && implies_p (exp, rev))
    {
      *expr = const0_rtx;
      return;
    }

  /* Similarly, if (not *EXPR) implies (not cond), *EXPR must be true.  */
  if (rev && reve && implies_p (reve, rev))
    {
      *expr = const_true_rtx;
      return;
    }

  /* We would like to have some other tests here.  TODO.  */

  return;
}
/* Use relationship between A and *B to eventually eliminate *B.
   OP is the operation we consider.  */

static void
eliminate_implied_condition (enum rtx_code op, rtx a, rtx *b)
{
  switch (op)
    {
    case AND:
      /* If A implies *B, we may replace *B by true.  */
      if (implies_p (a, *b))
        *b = const_true_rtx;
      break;

    case IOR:
      /* If *B implies A, we may replace *B by false.  */
      if (implies_p (*b, a))
        *b = const0_rtx;
      break;

    default:
      gcc_unreachable ();
    }
}

/* Eliminates the conditions in TAIL that are implied by HEAD.  OP is the
   operation we consider.  */

static void
eliminate_implied_conditions (enum rtx_code op, rtx *head, rtx tail)
{
  rtx elt;

  for (elt = tail; elt; elt = XEXP (elt, 1))
    eliminate_implied_condition (op, *head, &XEXP (elt, 0));
  for (elt = tail; elt; elt = XEXP (elt, 1))
    eliminate_implied_condition (op, XEXP (elt, 0), head);
}
/* Simplifies *EXPR using initial values at the start of the LOOP.  If *EXPR
   is a list, its elements are assumed to be combined using OP.  */

static void
simplify_using_initial_values (struct loop *loop, enum rtx_code op, rtx *expr)
{
  rtx head, tail, insn;
  rtx neutral, aggr;
  regset altered;
  edge e;

  if (!*expr)
    return;

  if (CONSTANT_P (*expr))
    return;

  if (GET_CODE (*expr) == EXPR_LIST)
    {
      head = XEXP (*expr, 0);
      tail = XEXP (*expr, 1);

      eliminate_implied_conditions (op, &head, tail);

      switch (op)
        {
        case AND:
          neutral = const_true_rtx;
          aggr = const0_rtx;
          break;

        case IOR:
          neutral = const0_rtx;
          aggr = const_true_rtx;
          break;

        default:
          gcc_unreachable ();
        }

      simplify_using_initial_values (loop, UNKNOWN, &head);
      if (head == aggr)
        {
          XEXP (*expr, 0) = aggr;
          XEXP (*expr, 1) = NULL_RTX;
          return;
        }
      else if (head == neutral)
        {
          *expr = tail;
          simplify_using_initial_values (loop, op, expr);
          return;
        }
      simplify_using_initial_values (loop, op, &tail);

      if (tail && XEXP (tail, 0) == aggr)
        {
          *expr = tail;
          return;
        }

      XEXP (*expr, 0) = head;
      XEXP (*expr, 1) = tail;
      return;
    }

  gcc_assert (op == UNKNOWN);

  e = loop_preheader_edge (loop);
  if (e->src == ENTRY_BLOCK_PTR)
    return;

  altered = ALLOC_REG_SET (&reg_obstack);

  while (1)
    {
      insn = BB_END (e->src);
      if (any_condjump_p (insn))
        {
          rtx cond = get_condition (BB_END (e->src), NULL, false, true);

          if (cond && (e->flags & EDGE_FALLTHRU))
            cond = reversed_condition (cond);
          if (cond)
            {
              simplify_using_condition (cond, expr, altered);
              if (CONSTANT_P (*expr))
                {
                  FREE_REG_SET (altered);
                  return;
                }
            }
        }

      FOR_BB_INSNS_REVERSE (e->src, insn)
        {
          if (!INSN_P (insn))
            continue;

          simplify_using_assignment (insn, expr, altered);
          if (CONSTANT_P (*expr))
            {
              FREE_REG_SET (altered);
              return;
            }
        }

      if (!single_pred_p (e->src)
          || single_pred (e->src) == ENTRY_BLOCK_PTR)
        break;
      e = single_pred_edge (e->src);
    }

  FREE_REG_SET (altered);
}
/* Transforms invariant IV into MODE.  Adds assumptions to DESC based on the
   fact that IV occurs as the left operand of comparison COND, whose
   signedness is SIGNED_P.  */

static void
shorten_into_mode (struct rtx_iv *iv, enum machine_mode mode,
                   enum rtx_code cond, bool signed_p, struct niter_desc *desc)
{
  rtx mmin, mmax, cond_over, cond_under;

  get_mode_bounds (mode, signed_p, iv->extend_mode, &mmin, &mmax);
  cond_under = simplify_gen_relational (LT, SImode, iv->extend_mode,
                                        iv->base, mmin);
  cond_over = simplify_gen_relational (GT, SImode, iv->extend_mode,
                                       iv->base, mmax);

  switch (cond)
    {
    case LE:
    case LT:
    case LEU:
    case LTU:
      if (cond_under != const0_rtx)
        desc->infinite =
          alloc_EXPR_LIST (0, cond_under, desc->infinite);
      if (cond_over != const0_rtx)
        desc->noloop_assumptions =
          alloc_EXPR_LIST (0, cond_over, desc->noloop_assumptions);
      break;

    case GE:
    case GT:
    case GEU:
    case GTU:
      if (cond_over != const0_rtx)
        desc->infinite =
          alloc_EXPR_LIST (0, cond_over, desc->infinite);
      if (cond_under != const0_rtx)
        desc->noloop_assumptions =
          alloc_EXPR_LIST (0, cond_under, desc->noloop_assumptions);
      break;

    case NE:
      if (cond_over != const0_rtx)
        desc->infinite =
          alloc_EXPR_LIST (0, cond_over, desc->infinite);
      if (cond_under != const0_rtx)
        desc->infinite =
          alloc_EXPR_LIST (0, cond_under, desc->infinite);
      break;

    default:
      gcc_unreachable ();
    }

  iv->mode = mode;
  iv->extend = signed_p ? SIGN_EXTEND : ZERO_EXTEND;
}
/* Transforms IV0 and IV1 compared by COND so that they are both compared as
   subregs of the same mode if possible (sometimes it is necessary to add
   some assumptions to DESC).  */

static bool
canonicalize_iv_subregs (struct rtx_iv *iv0, struct rtx_iv *iv1,
                         enum rtx_code cond, struct niter_desc *desc)
{
  enum machine_mode comp_mode;
  bool signed_p;

  /* If the ivs behave specially in the first iteration, or are
     added/multiplied after extending, we ignore them.  */
  if (iv0->first_special || iv0->mult != const1_rtx || iv0->delta != const0_rtx)
    return false;
  if (iv1->first_special || iv1->mult != const1_rtx || iv1->delta != const0_rtx)
    return false;

  /* If there is some extend, it must match signedness of the comparison.  */
  switch (cond)
    {
    case LE:
    case LT:
      if (iv0->extend == ZERO_EXTEND
          || iv1->extend == ZERO_EXTEND)
        return false;
      signed_p = true;
      break;

    case LEU:
    case LTU:
      if (iv0->extend == SIGN_EXTEND
          || iv1->extend == SIGN_EXTEND)
        return false;
      signed_p = false;
      break;

    case NE:
      if (iv0->extend != UNKNOWN
          && iv1->extend != UNKNOWN
          && iv0->extend != iv1->extend)
        return false;

      signed_p = false;
      if (iv0->extend != UNKNOWN)
        signed_p = iv0->extend == SIGN_EXTEND;
      if (iv1->extend != UNKNOWN)
        signed_p = iv1->extend == SIGN_EXTEND;
      break;

    default:
      gcc_unreachable ();
    }

  /* Values of both variables should be computed in the same mode.  These
     might indeed be different, if we have comparison like

     (compare (subreg:SI (iv0)) (subreg:SI (iv1)))

     and iv0 and iv1 are both ivs iterating in SI mode, but calculated
     in different modes.  This does not seem impossible to handle, but
     it hardly ever occurs in practice.

     The only exception is the case when one of operands is invariant.
     For example pentium 3 generates comparisons like
     (lt (subreg:HI (reg:SI)) 100).  Here we assign HImode to 100, but we
     definitely do not want this to prevent the optimization.  */
  comp_mode = iv0->extend_mode;
  if (GET_MODE_BITSIZE (comp_mode) < GET_MODE_BITSIZE (iv1->extend_mode))
    comp_mode = iv1->extend_mode;

  if (iv0->extend_mode != comp_mode)
    {
      if (iv0->mode != iv0->extend_mode
          || iv0->step != const0_rtx)
        return false;

      iv0->base = simplify_gen_unary (signed_p ? SIGN_EXTEND : ZERO_EXTEND,
                                      comp_mode, iv0->base, iv0->mode);
      iv0->extend_mode = comp_mode;
    }

  if (iv1->extend_mode != comp_mode)
    {
      if (iv1->mode != iv1->extend_mode
          || iv1->step != const0_rtx)
        return false;

      iv1->base = simplify_gen_unary (signed_p ? SIGN_EXTEND : ZERO_EXTEND,
                                      comp_mode, iv1->base, iv1->mode);
      iv1->extend_mode = comp_mode;
    }

  /* Check that both ivs belong to a range of a single mode.  If one of the
     operands is an invariant, we may need to shorten it into the common
     mode.  */
  if (iv0->mode == iv0->extend_mode
      && iv0->step == const0_rtx
      && iv0->mode != iv1->mode)
    shorten_into_mode (iv0, iv1->mode, cond, signed_p, desc);

  if (iv1->mode == iv1->extend_mode
      && iv1->step == const0_rtx
      && iv0->mode != iv1->mode)
    shorten_into_mode (iv1, iv0->mode, swap_condition (cond), signed_p, desc);

  if (iv0->mode != iv1->mode)
    return false;

  desc->mode = iv0->mode;
  desc->signed_p = signed_p;

  return true;
}
/* Computes number of iterations of the CONDITION in INSN in LOOP and stores
   the result into DESC.  Very similar to determine_number_of_iterations
   (basically its rtl version), complicated by things like subregs.  */

static void
iv_number_of_iterations (struct loop *loop, rtx insn, rtx condition,
                         struct niter_desc *desc)
{
  rtx op0, op1, delta, step, bound, may_xform, def_insn, tmp, tmp0, tmp1;
  struct rtx_iv iv0, iv1, tmp_iv;
  rtx assumption, may_not_xform;
  enum rtx_code cond;
  enum machine_mode mode, comp_mode;
  rtx mmin, mmax, mode_mmin, mode_mmax;
  unsigned HOST_WIDEST_INT s, size, d, inv;
  HOST_WIDEST_INT up, down, inc, step_val;
  int was_sharp = false;
  rtx old_niter;
  bool step_is_pow2;

  /* The meaning of these assumptions is this:
     if !assumptions
       then the rest of information does not have to be valid
     if noloop_assumptions then the loop does not roll
     if infinite then this exit is never used  */

  desc->assumptions = NULL_RTX;
  desc->noloop_assumptions = NULL_RTX;
  desc->infinite = NULL_RTX;
  desc->simple_p = true;

  desc->const_iter = false;
  desc->niter_expr = NULL_RTX;
  desc->niter_max = 0;

  cond = GET_CODE (condition);
  gcc_assert (COMPARISON_P (condition));

  mode = GET_MODE (XEXP (condition, 0));
  if (mode == VOIDmode)
    mode = GET_MODE (XEXP (condition, 1));
  /* The constant comparisons should be folded.  */
  gcc_assert (mode != VOIDmode);

  /* We only handle integers or pointers.  */
  if (GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    goto fail;

  op0 = XEXP (condition, 0);
  def_insn = iv_get_reaching_def (insn, op0);
  if (!iv_analyze (def_insn, op0, &iv0))
    goto fail;
  if (iv0.extend_mode == VOIDmode)
    iv0.mode = iv0.extend_mode = mode;

  op1 = XEXP (condition, 1);
  def_insn = iv_get_reaching_def (insn, op1);
  if (!iv_analyze (def_insn, op1, &iv1))
    goto fail;
  if (iv1.extend_mode == VOIDmode)
    iv1.mode = iv1.extend_mode = mode;

  if (GET_MODE_BITSIZE (iv0.extend_mode) > HOST_BITS_PER_WIDE_INT
      || GET_MODE_BITSIZE (iv1.extend_mode) > HOST_BITS_PER_WIDE_INT)
    goto fail;

  /* Check condition and normalize it.  */

  switch (cond)
    {
    case GE:
    case GT:
    case GEU:
    case GTU:
      tmp_iv = iv0; iv0 = iv1; iv1 = tmp_iv;
      cond = swap_condition (cond);
      break;
    case NE:
    case LE:
    case LEU:
    case LT:
    case LTU:
      break;
    default:
      goto fail;
    }

  /* Handle extends.  This is relatively nontrivial, so we only try in some
     easy cases, when we can canonicalize the ivs (possibly by adding some
     assumptions) to shape subreg (base + i * step).  This function also fills
     in desc->mode and desc->signed_p.  */

  if (!canonicalize_iv_subregs (&iv0, &iv1, cond, desc))
    goto fail;

  comp_mode = iv0.extend_mode;
  mode = iv0.mode;
  size = GET_MODE_BITSIZE (mode);
  get_mode_bounds (mode, (cond == LE || cond == LT), comp_mode, &mmin, &mmax);
  mode_mmin = lowpart_subreg (mode, mmin, comp_mode);
  mode_mmax = lowpart_subreg (mode, mmax, comp_mode);

  if (GET_CODE (iv0.step) != CONST_INT || GET_CODE (iv1.step) != CONST_INT)
    goto fail;

  /* We can take care of the case of two induction variables chasing each other
     if the test is NE.  I have never seen a loop using it, but still it is
     cool.  */
  if (iv0.step != const0_rtx && iv1.step != const0_rtx)
    {
      if (cond != NE)
        goto fail;

      iv0.step = simplify_gen_binary (MINUS, comp_mode, iv0.step, iv1.step);
      iv1.step = const0_rtx;
    }

  /* This is either infinite loop or the one that ends immediately, depending
     on initial values.  Unswitching should remove this kind of conditions.  */
  if (iv0.step == const0_rtx && iv1.step == const0_rtx)
    goto fail;

  if (cond != NE)
    {
      if (iv0.step == const0_rtx)
        step_val = -INTVAL (iv1.step);
      else
        step_val = INTVAL (iv0.step);

      /* Ignore loops of while (i-- < 10) type.  */
      if (step_val < 0)
        goto fail;

      step_is_pow2 = !(step_val & (step_val - 1));
    }
  else
    {
      /* We do not care about whether the step is power of two in this
         case.  */
      step_is_pow2 = false;
      step_val = 0;
    }

  /* Some more condition normalization.  We must record some assumptions
     due to overflows.  */
  switch (cond)
    {
    case LT:
    case LTU:
      /* We want to take care only of non-sharp relationals; this is easy,
         as in cases the overflow would make the transformation unsafe
         the loop does not roll.  Seemingly it would make more sense to want
         to take care of sharp relationals instead, as NE is more similar to
         them, but the problem is that here the transformation would be more
         difficult due to possibly infinite loops.  */
      if (iv0.step == const0_rtx)
        {
          tmp = lowpart_subreg (mode, iv0.base, comp_mode);
          assumption = simplify_gen_relational (EQ, SImode, mode, tmp,
                                                mode_mmax);
          if (assumption == const_true_rtx)
            goto zero_iter_simplify;
          iv0.base = simplify_gen_binary (PLUS, comp_mode,
                                          iv0.base, const1_rtx);
        }
      else
        {
          tmp = lowpart_subreg (mode, iv1.base, comp_mode);
          assumption = simplify_gen_relational (EQ, SImode, mode, tmp,
                                                mode_mmin);
          if (assumption == const_true_rtx)
            goto zero_iter_simplify;
          iv1.base = simplify_gen_binary (PLUS, comp_mode,
                                          iv1.base, constm1_rtx);
        }

      if (assumption != const0_rtx)
        desc->noloop_assumptions =
          alloc_EXPR_LIST (0, assumption, desc->noloop_assumptions);
      cond = (cond == LT) ? LE : LEU;

      /* It will be useful to be able to tell the difference once more in
         LE -> NE reduction.  */
      was_sharp = true;
      break;
    default: ;
    }
  /* Take care of trivially infinite loops.  */
  if (cond != NE)
    {
      if (iv0.step == const0_rtx)
        {
          tmp = lowpart_subreg (mode, iv0.base, comp_mode);
          if (rtx_equal_p (tmp, mode_mmin))
            {
              desc->infinite =
                alloc_EXPR_LIST (0, const_true_rtx, NULL_RTX);
              /* Fill in the remaining fields somehow.  */
              goto zero_iter_simplify;
            }
        }
      else
        {
          tmp = lowpart_subreg (mode, iv1.base, comp_mode);
          if (rtx_equal_p (tmp, mode_mmax))
            {
              desc->infinite =
                alloc_EXPR_LIST (0, const_true_rtx, NULL_RTX);
              /* Fill in the remaining fields somehow.  */
              goto zero_iter_simplify;
            }
        }
    }

  /* If we can we want to take care of NE conditions instead of size
     comparisons, as they are much more friendly (most importantly
     this takes care of special handling of loops with step 1).  We can
     do it if we first check that upper bound is greater or equal to
     lower bound, their difference is constant c modulo step and that
     there is not an overflow.  */
  if (cond != NE)
    {
      if (iv0.step == const0_rtx)
        step = simplify_gen_unary (NEG, comp_mode, iv1.step, comp_mode);
      else
        step = iv0.step;
      delta = simplify_gen_binary (MINUS, comp_mode, iv1.base, iv0.base);
      delta = lowpart_subreg (mode, delta, comp_mode);
      delta = simplify_gen_binary (UMOD, mode, delta, step);
      may_xform = const0_rtx;
      may_not_xform = const_true_rtx;

      if (GET_CODE (delta) == CONST_INT)
        {
          if (was_sharp && INTVAL (delta) == INTVAL (step) - 1)
            {
              /* A special case.  We have transformed condition of type
                   for (i = 0; i < 4; i += 4)
                 into
                   for (i = 0; i <= 3; i += 4)
                 obviously if the test for overflow during that transformation
                 passed, we cannot overflow here.  Most importantly any
                 loop with sharp end condition and step 1 falls into this
                 category, so handling this case specially is definitely
                 worth the troubles.  */
              may_xform = const_true_rtx;
            }
          else if (iv0.step == const0_rtx)
            {
              bound = simplify_gen_binary (PLUS, comp_mode, mmin, step);
              bound = simplify_gen_binary (MINUS, comp_mode, bound, delta);
              bound = lowpart_subreg (mode, bound, comp_mode);
              tmp = lowpart_subreg (mode, iv0.base, comp_mode);
              may_xform = simplify_gen_relational (cond, SImode, mode,
                                                   bound, tmp);
              may_not_xform = simplify_gen_relational (reverse_condition (cond),
                                                       SImode, mode,
                                                       bound, tmp);
            }
          else
            {
              bound = simplify_gen_binary (MINUS, comp_mode, mmax, step);
              bound = simplify_gen_binary (PLUS, comp_mode, bound, delta);
              bound = lowpart_subreg (mode, bound, comp_mode);
              tmp = lowpart_subreg (mode, iv1.base, comp_mode);
              may_xform = simplify_gen_relational (cond, SImode, mode,
                                                   tmp, bound);
              may_not_xform = simplify_gen_relational (reverse_condition (cond),
                                                       SImode, mode,
                                                       tmp, bound);
            }
        }

      if (may_xform != const0_rtx)
        {
          /* We perform the transformation always provided that it is not
             completely senseless.  This is OK, as we would need this assumption
             to determine the number of iterations anyway.  */
          if (may_xform != const_true_rtx)
            {
              /* If the step is a power of two and the final value we have
                 computed overflows, the cycle is infinite.  Otherwise it
                 is nontrivial to compute the number of iterations.  */
              if (step_is_pow2)
                desc->infinite = alloc_EXPR_LIST (0, may_not_xform,
                                                  desc->infinite);
              else
                desc->assumptions = alloc_EXPR_LIST (0, may_xform,
                                                     desc->assumptions);
            }

          /* We are going to lose some information about upper bound on
             number of iterations in this step, so record the information
             here.  */
          inc = INTVAL (iv0.step) - INTVAL (iv1.step);
          if (GET_CODE (iv1.base) == CONST_INT)
            up = INTVAL (iv1.base);
          else
            up = INTVAL (mode_mmax) - inc;
          down = INTVAL (GET_CODE (iv0.base) == CONST_INT
                         ? iv0.base
                         : mode_mmin);
          desc->niter_max = (up - down) / inc + 1;

          if (iv0.step == const0_rtx)
            {
              iv0.base = simplify_gen_binary (PLUS, comp_mode, iv0.base, delta);
              iv0.base = simplify_gen_binary (MINUS, comp_mode, iv0.base, step);
            }
          else
            {
              iv1.base = simplify_gen_binary (MINUS, comp_mode, iv1.base, delta);
              iv1.base = simplify_gen_binary (PLUS, comp_mode, iv1.base, step);
            }

          tmp0 = lowpart_subreg (mode, iv0.base, comp_mode);
          tmp1 = lowpart_subreg (mode, iv1.base, comp_mode);
          assumption = simplify_gen_relational (reverse_condition (cond),
                                                SImode, mode, tmp0, tmp1);
          if (assumption == const_true_rtx)
            goto zero_iter_simplify;
          else if (assumption != const0_rtx)
            desc->noloop_assumptions =
              alloc_EXPR_LIST (0, assumption, desc->noloop_assumptions);
          cond = NE;
        }
    }

  /* Count the number of iterations.  */
  if (cond == NE)
    {
      /* Everything we do here is just arithmetics modulo size of mode.  This
         makes us able to do more involved computations of number of iterations
         than in other cases.  First transform the condition into shape
         s * i <> c, with s positive.  */
      iv1.base = simplify_gen_binary (MINUS, comp_mode, iv1.base, iv0.base);
      iv0.base = const0_rtx;
      iv0.step = simplify_gen_binary (MINUS, comp_mode, iv0.step, iv1.step);
      iv1.step = const0_rtx;
      if (INTVAL (iv0.step) < 0)
        {
          iv0.step = simplify_gen_unary (NEG, comp_mode, iv0.step, mode);
          iv1.base = simplify_gen_unary (NEG, comp_mode, iv1.base, mode);
        }
      iv0.step = lowpart_subreg (mode, iv0.step, comp_mode);

      /* Let nsd (s, size of mode) = d.  If d does not divide c, the loop
         is infinite.  Otherwise, the number of iterations is
         (inverse(s/d) * (c/d)) mod (size of mode/d).  */
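      /* For instance (a worked illustration, not from the original sources):
         in an 8-bit mode with s = 2 and c = 6 we get d = 2, so the number of
         iterations is inverse (1, 7) * 3 = 3 modulo 2**7, and indeed
         2 * 3 == 6 (mod 256).  */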
2342 s = INTVAL (iv0.step); d = 1;
2343 while (s % 2 != 1)
2345 s /= 2;
2346 d *= 2;
2347 size--;
2349 bound = GEN_INT (((unsigned HOST_WIDEST_INT) 1 << (size - 1 ) << 1) - 1);
2351 tmp1 = lowpart_subreg (mode, iv1.base, comp_mode);
2352 tmp = simplify_gen_binary (UMOD, mode, tmp1, GEN_INT (d));
2353 assumption = simplify_gen_relational (NE, SImode, mode, tmp, const0_rtx);
2354 desc->infinite = alloc_EXPR_LIST (0, assumption, desc->infinite);
2356 tmp = simplify_gen_binary (UDIV, mode, tmp1, GEN_INT (d));
2357 inv = inverse (s, size);
2358 tmp = simplify_gen_binary (MULT, mode, tmp, gen_int_mode (inv, mode));
2359 desc->niter_expr = simplify_gen_binary (AND, mode, tmp, bound);
2361 else
2363 if (iv1.step == const0_rtx)
2364 /* Condition in shape a + s * i <= b
2365 We must know that b + s does not overflow and a <= b + s and then we
2366 can compute number of iterations as (b + s - a) / s. (It might
2367 seem that we in fact could be more clever about testing the b + s
2368 overflow condition using some information about b - a mod s,
2369 but it was already taken into account during LE -> NE transform). */
2371 step = iv0.step;
2372 tmp0 = lowpart_subreg (mode, iv0.base, comp_mode);
2373 tmp1 = lowpart_subreg (mode, iv1.base, comp_mode);
2375 bound = simplify_gen_binary (MINUS, mode, mode_mmax,
2376 lowpart_subreg (mode, step,
2377 comp_mode));
2378 if (step_is_pow2)
2380 rtx t0, t1;
2382 /* If s is power of 2, we know that the loop is infinite if
2383 a % s <= b % s and b + s overflows. */
2384 assumption = simplify_gen_relational (reverse_condition (cond),
2385 SImode, mode,
2386 tmp1, bound);
2388 t0 = simplify_gen_binary (UMOD, mode, copy_rtx (tmp0), step);
2389 t1 = simplify_gen_binary (UMOD, mode, copy_rtx (tmp1), step);
2390 tmp = simplify_gen_relational (cond, SImode, mode, t0, t1);
2391 assumption = simplify_gen_binary (AND, SImode, assumption, tmp);
2392 desc->infinite =
2393 alloc_EXPR_LIST (0, assumption, desc->infinite);
2395 else
2397 assumption = simplify_gen_relational (cond, SImode, mode,
2398 tmp1, bound);
2399 desc->assumptions =
2400 alloc_EXPR_LIST (0, assumption, desc->assumptions);
2403 tmp = simplify_gen_binary (PLUS, comp_mode, iv1.base, iv0.step);
2404 tmp = lowpart_subreg (mode, tmp, comp_mode);
2405 assumption = simplify_gen_relational (reverse_condition (cond),
2406 SImode, mode, tmp0, tmp);
2408 delta = simplify_gen_binary (PLUS, mode, tmp1, step);
2409 delta = simplify_gen_binary (MINUS, mode, delta, tmp0);
2411 else
2413 /* Condition is in the shape a <= b - s * i.
2414 We must know that a - s does not overflow and that a - s <= b; then
2415 we can again compute the number of iterations as (b - (a - s)) / s.  */
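/* Illustrative example (not from the original source): with a = 7,
   b = 17 and s = 5 in an 8-bit mode, a - s = 2 does not overflow and
   a - s <= b, so niter = (b - (a - s)) / s = 15 / 5 = 3; the condition
   holds for the iv values 17, 12 and 7 and first fails at 2.  */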
2416 step = simplify_gen_unary (NEG, mode, iv1.step, mode);
2417 tmp0 = lowpart_subreg (mode, iv0.base, comp_mode);
2418 tmp1 = lowpart_subreg (mode, iv1.base, comp_mode);
2420 bound = simplify_gen_binary (PLUS, mode, mode_mmin,
2421 lowpart_subreg (mode, step, comp_mode));
2422 if (step_is_pow2)
2424 rtx t0, t1;
2426 /* If s is a power of 2, we know that the loop is infinite if
2427 a % s <= b % s and a - s overflows.  */
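/* E.g. (hypothetical 8-bit example) with s = 4, a = 1 and b = 201:
   a - s wraps around and a % 4 == 1 <= b % 4 == 1; every value the
   iv can take is == 1 (mod 4) and hence >= 1, so the condition
   never fails.  */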
2428 assumption = simplify_gen_relational (reverse_condition (cond),
2429 SImode, mode,
2430 bound, tmp0);
2432 t0 = simplify_gen_binary (UMOD, mode, copy_rtx (tmp0), step);
2433 t1 = simplify_gen_binary (UMOD, mode, copy_rtx (tmp1), step);
2434 tmp = simplify_gen_relational (cond, SImode, mode, t0, t1);
2435 assumption = simplify_gen_binary (AND, SImode, assumption, tmp);
2436 desc->infinite =
2437 alloc_EXPR_LIST (0, assumption, desc->infinite);
2439 else
2441 assumption = simplify_gen_relational (cond, SImode, mode,
2442 bound, tmp0);
2443 desc->assumptions =
2444 alloc_EXPR_LIST (0, assumption, desc->assumptions);
2447 tmp = simplify_gen_binary (PLUS, comp_mode, iv0.base, iv1.step);
2448 tmp = lowpart_subreg (mode, tmp, comp_mode);
2449 assumption = simplify_gen_relational (reverse_condition (cond),
2450 SImode, mode,
2451 tmp, tmp1);
2452 delta = simplify_gen_binary (MINUS, mode, tmp0, step);
2453 delta = simplify_gen_binary (MINUS, mode, tmp1, delta);
2455 if (assumption == const_true_rtx)
2456 goto zero_iter_simplify;
2457 else if (assumption != const0_rtx)
2458 desc->noloop_assumptions =
2459 alloc_EXPR_LIST (0, assumption, desc->noloop_assumptions);
2460 delta = simplify_gen_binary (UDIV, mode, delta, step);
2461 desc->niter_expr = delta;
2464 old_niter = desc->niter_expr;
2466 simplify_using_initial_values (loop, AND, &desc->assumptions);
2467 if (desc->assumptions
2468 && XEXP (desc->assumptions, 0) == const0_rtx)
2469 goto fail;
2470 simplify_using_initial_values (loop, IOR, &desc->noloop_assumptions);
2471 simplify_using_initial_values (loop, IOR, &desc->infinite);
2472 simplify_using_initial_values (loop, UNKNOWN, &desc->niter_expr);
2474 /* Rerun the simplification.  Consider code (created by copying loop headers)
2476 i = 0;
2478 if (0 < n)
2479 {
2480 do
2481 {
2482 i++;
2483 } while (i < n);
2484 }
2486 The first pass determines that i = 0; the second pass uses this to eliminate
2487 the noloop assumption.  */
2489 simplify_using_initial_values (loop, AND, &desc->assumptions);
2490 if (desc->assumptions
2491 && XEXP (desc->assumptions, 0) == const0_rtx)
2492 goto fail;
2493 simplify_using_initial_values (loop, IOR, &desc->noloop_assumptions);
2494 simplify_using_initial_values (loop, IOR, &desc->infinite);
2495 simplify_using_initial_values (loop, UNKNOWN, &desc->niter_expr);
2497 if (desc->noloop_assumptions
2498 && XEXP (desc->noloop_assumptions, 0) == const_true_rtx)
2499 goto zero_iter;
2501 if (GET_CODE (desc->niter_expr) == CONST_INT)
2503 unsigned HOST_WIDEST_INT val = INTVAL (desc->niter_expr);
2505 desc->const_iter = true;
2506 desc->niter_max = desc->niter = val & GET_MODE_MASK (desc->mode);
2508 else
2510 if (!desc->niter_max)
2511 desc->niter_max = determine_max_iter (desc);
2513 /* simplify_using_initial_values does a copy propagation on the registers
2514 in the expression for the number of iterations.  This prolongs the live
2515 ranges of registers and increases register pressure, and usually
2516 brings no gain (and if it does, the CSE pass will take care
2517 of it anyway).  So prevent this behavior, unless it enabled us to
2518 derive that the number of iterations is a constant.  */
2519 desc->niter_expr = old_niter;
2522 return;
2524 zero_iter_simplify:
2525 /* Simplify the assumptions. */
2526 simplify_using_initial_values (loop, AND, &desc->assumptions);
2527 if (desc->assumptions
2528 && XEXP (desc->assumptions, 0) == const0_rtx)
2529 goto fail;
2530 simplify_using_initial_values (loop, IOR, &desc->infinite);
2532 /* Fallthru. */
2533 zero_iter:
2534 desc->const_iter = true;
2535 desc->niter = 0;
2536 desc->niter_max = 0;
2537 desc->noloop_assumptions = NULL_RTX;
2538 desc->niter_expr = const0_rtx;
2539 return;
2541 fail:
2542 desc->simple_p = false;
2543 return;
2546 /* Checks whether E is a simple exit from LOOP and stores its description
2547 into DESC. */
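/* Illustrative example (not from the original source): in

     while (x < n)
       {
         ...
         x++;
       }

   the conditional jump testing x < n is executed on every iteration and its
   block dominates the latch, so the edge leaving the loop there is a simple
   exit.  A break buried inside an if in the body generally is not, since its
   block need not dominate the latch.  */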
2549 static void
2550 check_simple_exit (struct loop *loop, edge e, struct niter_desc *desc)
2552 basic_block exit_bb;
2553 rtx condition, at;
2554 edge ein;
2556 exit_bb = e->src;
2557 desc->simple_p = false;
2559 /* It must belong directly to the loop. */
2560 if (exit_bb->loop_father != loop)
2561 return;
2563 /* It must be tested (at least) once during any iteration. */
2564 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit_bb))
2565 return;
2567 /* It must end in a simple conditional jump. */
2568 if (!any_condjump_p (BB_END (exit_bb)))
2569 return;
2571 ein = EDGE_SUCC (exit_bb, 0);
2572 if (ein == e)
2573 ein = EDGE_SUCC (exit_bb, 1);
2575 desc->out_edge = e;
2576 desc->in_edge = ein;
2578 /* Test whether the condition is suitable. */
2579 if (!(condition = get_condition (BB_END (ein->src), &at, false, false)))
2580 return;
2582 if (ein->flags & EDGE_FALLTHRU)
2584 condition = reversed_condition (condition);
2585 if (!condition)
2586 return;
2589 /* Check that we are able to determine the number of iterations and fill
2590 in information about it.  */
2591 iv_number_of_iterations (loop, at, condition, desc);
2594 /* Finds a simple exit of LOOP and stores its description into DESC. */
2596 void
2597 find_simple_exit (struct loop *loop, struct niter_desc *desc)
2599 unsigned i;
2600 basic_block *body;
2601 edge e;
2602 struct niter_desc act;
2603 bool any = false;
2604 edge_iterator ei;
2606 desc->simple_p = false;
2607 body = get_loop_body (loop);
2609 for (i = 0; i < loop->num_nodes; i++)
2611 FOR_EACH_EDGE (e, ei, body[i]->succs)
2613 if (flow_bb_inside_loop_p (loop, e->dest))
2614 continue;
2616 check_simple_exit (loop, e, &act);
2617 if (!act.simple_p)
2618 continue;
2620 if (!any)
2621 any = true;
2622 else
2624 /* Prefer constant iterations; the fewer, the better.  */
2625 if (!act.const_iter
2626 || (desc->const_iter && act.niter >= desc->niter))
2627 continue;
2629 /* Also, if the current exit may be infinite while the old one
2630 may not, prefer the old one.  */
2631 if (act.infinite && !desc->infinite)
2632 continue;
2635 *desc = act;
2639 if (dump_file)
2641 if (desc->simple_p)
2643 fprintf (dump_file, "Loop %d is simple:\n", loop->num);
2644 fprintf (dump_file, " simple exit %d -> %d\n",
2645 desc->out_edge->src->index,
2646 desc->out_edge->dest->index);
2647 if (desc->assumptions)
2649 fprintf (dump_file, " assumptions: ");
2650 print_rtl (dump_file, desc->assumptions);
2651 fprintf (dump_file, "\n");
2653 if (desc->noloop_assumptions)
2655 fprintf (dump_file, " does not roll if: ");
2656 print_rtl (dump_file, desc->noloop_assumptions);
2657 fprintf (dump_file, "\n");
2659 if (desc->infinite)
2661 fprintf (dump_file, " infinite if: ");
2662 print_rtl (dump_file, desc->infinite);
2663 fprintf (dump_file, "\n");
2666 fprintf (dump_file, " number of iterations: ");
2667 print_rtl (dump_file, desc->niter_expr);
2668 fprintf (dump_file, "\n");
2670 fprintf (dump_file, " upper bound: ");
2671 fprintf (dump_file, HOST_WIDEST_INT_PRINT_DEC, desc->niter_max);
2672 fprintf (dump_file, "\n");
2674 else
2675 fprintf (dump_file, "Loop %d is not simple.\n", loop->num);
2678 free (body);
2681 /* Creates a simple loop description of LOOP if it has not been computed
2682 already.  */
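/* A typical use (illustrative sketch; the surrounding caller code is
   hypothetical):

     struct niter_desc *desc = get_simple_loop_desc (loop);

     if (desc->simple_p && desc->const_iter && desc->niter < 8)
       ... unroll the loop completely ...

   The description is cached in loop->aux; it is released again by
   free_simple_loop_desc.  */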
2684 struct niter_desc *
2685 get_simple_loop_desc (struct loop *loop)
2687 struct niter_desc *desc = simple_loop_desc (loop);
2689 if (desc)
2690 return desc;
2692 desc = xmalloc (sizeof (struct niter_desc));
2693 iv_analysis_loop_init (loop);
2694 find_simple_exit (loop, desc);
2695 loop->aux = desc;
2697 if (desc->simple_p && (desc->assumptions || desc->infinite))
2699 const char *wording;
2701 /* Assume that no overflow happens and that the loop is finite.
2702 We already warned at the tree level if we ran optimizations there. */
2703 if (!flag_tree_loop_optimize && warn_unsafe_loop_optimizations)
2705 if (desc->infinite)
2707 wording =
2708 flag_unsafe_loop_optimizations
2709 ? N_("assuming that the loop is not infinite")
2710 : N_("cannot optimize possibly infinite loops");
2711 warning (OPT_Wunsafe_loop_optimizations, "%s",
2712 gettext (wording));
2714 if (desc->assumptions)
2716 wording =
2717 flag_unsafe_loop_optimizations
2718 ? N_("assuming that the loop counter does not overflow")
2719 : N_("cannot optimize loop, the loop counter may overflow");
2720 warning (OPT_Wunsafe_loop_optimizations, "%s",
2721 gettext (wording));
2725 if (flag_unsafe_loop_optimizations)
2727 desc->assumptions = NULL_RTX;
2728 desc->infinite = NULL_RTX;
2732 return desc;
2735 /* Releases the simple loop description for LOOP.  */
2737 void
2738 free_simple_loop_desc (struct loop *loop)
2740 struct niter_desc *desc = simple_loop_desc (loop);
2742 if (!desc)
2743 return;
2745 free (desc);
2746 loop->aux = NULL;