/* Rtl-level induction variable analysis.
   Copyright (C) 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 2, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
/* This is just a very simplistic analysis of induction variables of the loop.
   The major use is for determining the number of iterations of a loop for
   loop unrolling, doloop optimization and branch prediction.  For this we
   are only interested in bivs and a fairly limited set of givs that are
   needed in the exit condition.  We also only compute the iv information on
   demand.

   The interesting registers are determined.  A register is interesting if

   -- it is set only in the blocks that dominate the latch of the current loop
   -- all its sets are simple -- i.e. in the form we understand.

   We also number the insns sequentially in each basic block.  For a use of the
   interesting reg, it is now easy to find a reaching definition (there may be
   only one).

   Induction variable is then simply analyzed by walking the use-def
   chains.

   Usage:

   iv_analysis_loop_init (loop);
   insn = iv_get_reaching_def (where, reg);
   if (iv_analyze (insn, reg, &iv))
     {
       ...
     }
   iv_analysis_done (); */
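/* For example, for a counter that starts at 0 and is incremented by 4 in
   each iteration, iv_analyze produces a description with base = 0 and
   step = 4, so its value in iteration i is 0 + i * 4; a loop invariant
   comes out with step = 0.  In general the value described by struct rtx_iv
   (see cfgloop.h) in iteration i is

     delta + mult * extend_{extend_mode} (subreg_{mode} (base + i * step)).  */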
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "cfgloop.h"
#include "expr.h"
#include "output.h"

/* The insn information.  */
struct insn_info
{
  /* Id of the insn.  */
  unsigned luid;
  /* The previous definition of the register defined by the single
     set in the insn.  */
  rtx prev_def;
  /* The description of the iv.  */
  struct rtx_iv iv;
};

static struct insn_info *insn_info;

/* The last definition of register.  */
static rtx *last_def;

/* The bivs.  */
static struct rtx_iv *bivs;

/* Maximal insn number for that there is place in insn_info array.  */
static unsigned max_insn_no;

/* Maximal register number for that there is place in bivs and last_def
   arrays.  */
static unsigned max_reg_no;
/* Dumps information about IV to FILE.  */

extern void dump_iv_info (FILE *, struct rtx_iv *);
void
dump_iv_info (FILE *file, struct rtx_iv *iv)
{
  if (!iv->base)
    {
      fprintf (file, "not simple");
      return;
    }

  if (iv->step == const0_rtx
      && !iv->first_special)
    fprintf (file, "invariant ");

  print_rtl (file, iv->base);
  if (iv->step != const0_rtx)
    {
      fprintf (file, " + ");
      print_rtl (file, iv->step);
      fprintf (file, " * iteration");
    }
  fprintf (file, " (in %s)", GET_MODE_NAME (iv->mode));

  if (iv->mode != iv->extend_mode)
    fprintf (file, " %s to %s",
	     rtx_name[iv->extend],
	     GET_MODE_NAME (iv->extend_mode));

  if (iv->mult != const1_rtx)
    {
      fprintf (file, " * ");
      print_rtl (file, iv->mult);
    }

  if (iv->delta != const0_rtx)
    {
      fprintf (file, " + ");
      print_rtl (file, iv->delta);
    }

  if (iv->first_special)
    fprintf (file, " (first special)");
}
/* Assigns luids to insns in basic block BB.  */

static void
assign_luids (basic_block bb)
{
  unsigned uid, i = 0;
  rtx insn;

  FOR_BB_INSNS (bb, insn)
    {
      uid = INSN_UID (insn);
      insn_info[uid].luid = i++;
      insn_info[uid].prev_def = NULL_RTX;
      insn_info[uid].iv.analysed = false;
    }
}
/* Generates a subreg to get the least significant part of EXPR (in mode
   INNER_MODE) to OUTER_MODE.  */

static rtx
lowpart_subreg (enum machine_mode outer_mode, rtx expr,
		enum machine_mode inner_mode)
{
  return simplify_gen_subreg (outer_mode, expr, inner_mode,
			      subreg_lowpart_offset (outer_mode, inner_mode));
}
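/* For example, lowpart_subreg (SImode, (reg:DI 100), DImode) yields
   (subreg:SI (reg:DI 100) 0) on a little-endian target; on big-endian
   targets subreg_lowpart_offset supplies the nonzero byte offset, so
   callers need not care about endianness.  */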
/* Checks whether REG is a well-behaved register.  */

static bool
simple_reg_p (rtx reg)
{
  unsigned r;

  if (GET_CODE (reg) == SUBREG)
    {
      if (!subreg_lowpart_p (reg))
	return false;
      reg = SUBREG_REG (reg);
    }

  if (!REG_P (reg))
    return false;

  r = REGNO (reg);
  if (HARD_REGISTER_NUM_P (r))
    return false;

  if (GET_MODE_CLASS (GET_MODE (reg)) != MODE_INT)
    return false;

  if (last_def[r] == const0_rtx)
    return false;

  return true;
}
/* Checks whether assignment LHS = RHS is simple enough for us to process.  */

static bool
simple_set_p (rtx lhs, rtx rhs)
{
  rtx op0, op1;

  if (!REG_P (lhs)
      || !simple_reg_p (lhs))
    return false;

  if (CONSTANT_P (rhs))
    return true;

  switch (GET_CODE (rhs))
    {
    case SUBREG:
    case REG:
      return simple_reg_p (rhs);

    case SIGN_EXTEND:
    case ZERO_EXTEND:
    case NEG:
      return simple_reg_p (XEXP (rhs, 0));

    case PLUS:
    case MINUS:
    case MULT:
    case ASHIFT:
      op0 = XEXP (rhs, 0);
      op1 = XEXP (rhs, 1);

      if (!simple_reg_p (op0)
	  && !CONSTANT_P (op0))
	return false;

      if (!simple_reg_p (op1)
	  && !CONSTANT_P (op1))
	return false;

      if (GET_CODE (rhs) == MULT
	  && !CONSTANT_P (op0)
	  && !CONSTANT_P (op1))
	return false;

      if (GET_CODE (rhs) == ASHIFT
	  && CONSTANT_P (op0))
	return false;

      return true;

    default:
      return false;
    }
}
253 /* Mark single SET in INSN. */
256 mark_single_set (rtx insn
, rtx set
)
258 rtx def
= SET_DEST (set
), src
;
261 src
= find_reg_equal_equiv_note (insn
);
267 if (!simple_set_p (SET_DEST (set
), src
))
271 uid
= INSN_UID (insn
);
273 bivs
[regno
].analysed
= false;
274 insn_info
[uid
].prev_def
= last_def
[regno
];
275 last_def
[regno
] = insn
;
280 /* Invalidate register REG unless it is equal to EXCEPT. */
283 kill_sets (rtx reg
, rtx by ATTRIBUTE_UNUSED
, void *except
)
285 if (GET_CODE (reg
) == SUBREG
)
286 reg
= SUBREG_REG (reg
);
292 last_def
[REGNO (reg
)] = const0_rtx
;
295 /* Marks sets in basic block BB. If DOM is true, BB dominates the loop
299 mark_sets (basic_block bb
, bool dom
)
303 FOR_BB_INSNS (bb
, insn
)
309 && (set
= single_set (insn
)))
310 def
= mark_single_set (insn
, set
);
314 note_stores (PATTERN (insn
), kill_sets
, def
);
318 /* Prepare the data for an induction variable analysis of a LOOP. */
321 iv_analysis_loop_init (struct loop
*loop
)
323 basic_block
*body
= get_loop_body_in_dom_order (loop
);
326 if ((unsigned) get_max_uid () >= max_insn_no
)
328 /* Add some reserve for insns and registers produced in optimizations. */
329 max_insn_no
= get_max_uid () + 100;
332 insn_info
= xmalloc (max_insn_no
* sizeof (struct insn_info
));
335 if ((unsigned) max_reg_num () >= max_reg_no
)
337 max_reg_no
= max_reg_num () + 100;
340 last_def
= xmalloc (max_reg_no
* sizeof (rtx
));
343 bivs
= xmalloc (max_reg_no
* sizeof (struct rtx_iv
));
346 memset (last_def
, 0, max_reg_num () * sizeof (rtx
));
348 for (b
= 0; b
< loop
->num_nodes
; b
++)
350 assign_luids (body
[b
]);
351 mark_sets (body
[b
], just_once_each_iteration_p (loop
, body
[b
]));
357 /* Gets definition of REG reaching the INSN. If REG is not simple, const0_rtx
358 is returned. If INSN is before the first def in the loop, NULL_RTX is
362 iv_get_reaching_def (rtx insn
, rtx reg
)
364 unsigned regno
, luid
, auid
;
368 if (GET_CODE (reg
) == SUBREG
)
370 if (!subreg_lowpart_p (reg
))
372 reg
= SUBREG_REG (reg
);
379 || last_def
[regno
] == const0_rtx
)
380 return last_def
[regno
];
382 bb
= BLOCK_FOR_INSN (insn
);
383 luid
= insn_info
[INSN_UID (insn
)].luid
;
385 ainsn
= last_def
[regno
];
388 abb
= BLOCK_FOR_INSN (ainsn
);
390 if (dominated_by_p (CDI_DOMINATORS
, bb
, abb
))
393 auid
= INSN_UID (ainsn
);
394 ainsn
= insn_info
[auid
].prev_def
;
402 abb
= BLOCK_FOR_INSN (ainsn
);
406 auid
= INSN_UID (ainsn
);
407 if (luid
> insn_info
[auid
].luid
)
410 ainsn
= insn_info
[auid
].prev_def
;
416 /* Sets IV to invariant CST in MODE. Always returns true (just for
417 consistency with other iv manipulation functions that may fail). */
420 iv_constant (struct rtx_iv
*iv
, rtx cst
, enum machine_mode mode
)
422 if (mode
== VOIDmode
)
423 mode
= GET_MODE (cst
);
428 iv
->step
= const0_rtx
;
429 iv
->first_special
= false;
430 iv
->extend
= UNKNOWN
;
431 iv
->extend_mode
= iv
->mode
;
432 iv
->delta
= const0_rtx
;
433 iv
->mult
= const1_rtx
;
438 /* Evaluates application of subreg to MODE on IV. */
441 iv_subreg (struct rtx_iv
*iv
, enum machine_mode mode
)
443 /* If iv is invariant, just calculate the new value. */
444 if (iv
->step
== const0_rtx
445 && !iv
->first_special
)
447 rtx val
= get_iv_value (iv
, const0_rtx
);
448 val
= lowpart_subreg (mode
, val
, iv
->extend_mode
);
451 iv
->extend
= UNKNOWN
;
452 iv
->mode
= iv
->extend_mode
= mode
;
453 iv
->delta
= const0_rtx
;
454 iv
->mult
= const1_rtx
;
458 if (iv
->extend_mode
== mode
)
461 if (GET_MODE_BITSIZE (mode
) > GET_MODE_BITSIZE (iv
->mode
))
464 iv
->extend
= UNKNOWN
;
467 iv
->base
= simplify_gen_binary (PLUS
, iv
->extend_mode
, iv
->delta
,
468 simplify_gen_binary (MULT
, iv
->extend_mode
,
469 iv
->base
, iv
->mult
));
470 iv
->step
= simplify_gen_binary (MULT
, iv
->extend_mode
, iv
->step
, iv
->mult
);
471 iv
->mult
= const1_rtx
;
472 iv
->delta
= const0_rtx
;
473 iv
->first_special
= false;
478 /* Evaluates application of EXTEND to MODE on IV. */
481 iv_extend (struct rtx_iv
*iv
, enum rtx_code extend
, enum machine_mode mode
)
483 /* If iv is invariant, just calculate the new value. */
484 if (iv
->step
== const0_rtx
485 && !iv
->first_special
)
487 rtx val
= get_iv_value (iv
, const0_rtx
);
488 val
= simplify_gen_unary (extend
, mode
, val
, iv
->extend_mode
);
491 iv
->extend
= UNKNOWN
;
492 iv
->mode
= iv
->extend_mode
= mode
;
493 iv
->delta
= const0_rtx
;
494 iv
->mult
= const1_rtx
;
498 if (mode
!= iv
->extend_mode
)
501 if (iv
->extend
!= UNKNOWN
502 && iv
->extend
!= extend
)
510 /* Evaluates negation of IV. */
513 iv_neg (struct rtx_iv
*iv
)
515 if (iv
->extend
== UNKNOWN
)
517 iv
->base
= simplify_gen_unary (NEG
, iv
->extend_mode
,
518 iv
->base
, iv
->extend_mode
);
519 iv
->step
= simplify_gen_unary (NEG
, iv
->extend_mode
,
520 iv
->step
, iv
->extend_mode
);
524 iv
->delta
= simplify_gen_unary (NEG
, iv
->extend_mode
,
525 iv
->delta
, iv
->extend_mode
);
526 iv
->mult
= simplify_gen_unary (NEG
, iv
->extend_mode
,
527 iv
->mult
, iv
->extend_mode
);
533 /* Evaluates addition or subtraction (according to OP) of IV1 to IV0. */
536 iv_add (struct rtx_iv
*iv0
, struct rtx_iv
*iv1
, enum rtx_code op
)
538 enum machine_mode mode
;
541 /* Extend the constant to extend_mode of the other operand if necessary. */
542 if (iv0
->extend
== UNKNOWN
543 && iv0
->mode
== iv0
->extend_mode
544 && iv0
->step
== const0_rtx
545 && GET_MODE_SIZE (iv0
->extend_mode
) < GET_MODE_SIZE (iv1
->extend_mode
))
547 iv0
->extend_mode
= iv1
->extend_mode
;
548 iv0
->base
= simplify_gen_unary (ZERO_EXTEND
, iv0
->extend_mode
,
549 iv0
->base
, iv0
->mode
);
551 if (iv1
->extend
== UNKNOWN
552 && iv1
->mode
== iv1
->extend_mode
553 && iv1
->step
== const0_rtx
554 && GET_MODE_SIZE (iv1
->extend_mode
) < GET_MODE_SIZE (iv0
->extend_mode
))
556 iv1
->extend_mode
= iv0
->extend_mode
;
557 iv1
->base
= simplify_gen_unary (ZERO_EXTEND
, iv1
->extend_mode
,
558 iv1
->base
, iv1
->mode
);
561 mode
= iv0
->extend_mode
;
562 if (mode
!= iv1
->extend_mode
)
565 if (iv0
->extend
== UNKNOWN
&& iv1
->extend
== UNKNOWN
)
567 if (iv0
->mode
!= iv1
->mode
)
570 iv0
->base
= simplify_gen_binary (op
, mode
, iv0
->base
, iv1
->base
);
571 iv0
->step
= simplify_gen_binary (op
, mode
, iv0
->step
, iv1
->step
);
576 /* Handle addition of constant. */
577 if (iv1
->extend
== UNKNOWN
579 && iv1
->step
== const0_rtx
)
581 iv0
->delta
= simplify_gen_binary (op
, mode
, iv0
->delta
, iv1
->base
);
585 if (iv0
->extend
== UNKNOWN
587 && iv0
->step
== const0_rtx
)
595 iv0
->delta
= simplify_gen_binary (PLUS
, mode
, iv0
->delta
, arg
);
602 /* Evaluates multiplication of IV by constant CST. */
605 iv_mult (struct rtx_iv
*iv
, rtx mby
)
607 enum machine_mode mode
= iv
->extend_mode
;
609 if (GET_MODE (mby
) != VOIDmode
610 && GET_MODE (mby
) != mode
)
613 if (iv
->extend
== UNKNOWN
)
615 iv
->base
= simplify_gen_binary (MULT
, mode
, iv
->base
, mby
);
616 iv
->step
= simplify_gen_binary (MULT
, mode
, iv
->step
, mby
);
620 iv
->delta
= simplify_gen_binary (MULT
, mode
, iv
->delta
, mby
);
621 iv
->mult
= simplify_gen_binary (MULT
, mode
, iv
->mult
, mby
);
627 /* Evaluates shift of IV by constant CST. */
630 iv_shift (struct rtx_iv
*iv
, rtx mby
)
632 enum machine_mode mode
= iv
->extend_mode
;
634 if (GET_MODE (mby
) != VOIDmode
635 && GET_MODE (mby
) != mode
)
638 if (iv
->extend
== UNKNOWN
)
640 iv
->base
= simplify_gen_binary (ASHIFT
, mode
, iv
->base
, mby
);
641 iv
->step
= simplify_gen_binary (ASHIFT
, mode
, iv
->step
, mby
);
645 iv
->delta
= simplify_gen_binary (ASHIFT
, mode
, iv
->delta
, mby
);
646 iv
->mult
= simplify_gen_binary (ASHIFT
, mode
, iv
->mult
, mby
);
652 /* The recursive part of get_biv_step. Gets the value of the single value
653 defined in INSN wrto initial value of REG inside loop, in shape described
657 get_biv_step_1 (rtx insn
, rtx reg
,
658 rtx
*inner_step
, enum machine_mode
*inner_mode
,
659 enum rtx_code
*extend
, enum machine_mode outer_mode
,
662 rtx set
, lhs
, rhs
, op0
= NULL_RTX
, op1
= NULL_RTX
;
663 rtx next
, nextr
, def_insn
, tmp
;
666 set
= single_set (insn
);
667 rhs
= find_reg_equal_equiv_note (insn
);
672 lhs
= SET_DEST (set
);
674 code
= GET_CODE (rhs
);
687 if (code
== PLUS
&& CONSTANT_P (op0
))
689 tmp
= op0
; op0
= op1
; op1
= tmp
;
692 if (!simple_reg_p (op0
)
693 || !CONSTANT_P (op1
))
696 if (GET_MODE (rhs
) != outer_mode
)
698 /* ppc64 uses expressions like
700 (set x:SI (plus:SI (subreg:SI y:DI) 1)).
702 this is equivalent to
704 (set x':DI (plus:DI y:DI 1))
705 (set x:SI (subreg:SI (x':DI)). */
706 if (GET_CODE (op0
) != SUBREG
)
708 if (GET_MODE (SUBREG_REG (op0
)) != outer_mode
)
717 if (GET_MODE (rhs
) != outer_mode
)
721 if (!simple_reg_p (op0
))
731 if (GET_CODE (next
) == SUBREG
)
733 if (!subreg_lowpart_p (next
))
736 nextr
= SUBREG_REG (next
);
737 if (GET_MODE (nextr
) != outer_mode
)
743 def_insn
= iv_get_reaching_def (insn
, nextr
);
744 if (def_insn
== const0_rtx
)
749 if (!rtx_equal_p (nextr
, reg
))
752 *inner_step
= const0_rtx
;
754 *inner_mode
= outer_mode
;
755 *outer_step
= const0_rtx
;
757 else if (!get_biv_step_1 (def_insn
, reg
,
758 inner_step
, inner_mode
, extend
, outer_mode
,
762 if (GET_CODE (next
) == SUBREG
)
764 enum machine_mode amode
= GET_MODE (next
);
766 if (GET_MODE_SIZE (amode
) > GET_MODE_SIZE (*inner_mode
))
770 *inner_step
= simplify_gen_binary (PLUS
, outer_mode
,
771 *inner_step
, *outer_step
);
772 *outer_step
= const0_rtx
;
784 if (*inner_mode
== outer_mode
785 /* See comment in previous switch. */
786 || GET_MODE (rhs
) != outer_mode
)
787 *inner_step
= simplify_gen_binary (code
, outer_mode
,
790 *outer_step
= simplify_gen_binary (code
, outer_mode
,
796 if (GET_MODE (op0
) != *inner_mode
797 || *extend
!= UNKNOWN
798 || *outer_step
!= const0_rtx
)
811 /* Gets the operation on register REG inside loop, in shape
813 OUTER_STEP + EXTEND_{OUTER_MODE} (SUBREG_{INNER_MODE} (REG + INNER_STEP))
815 If the operation cannot be described in this shape, return false. */
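/* For a plain biv updated by (set x (plus:SI x (const_int 4))), this
   degenerates to *inner_mode == *outer_mode == SImode, *inner_step = 4,
   *outer_step = 0 and *extend == UNKNOWN; the extend and outer step only
   matter for the narrowed updates handled in get_biv_step_1 above.  */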
818 get_biv_step (rtx reg
, rtx
*inner_step
, enum machine_mode
*inner_mode
,
819 enum rtx_code
*extend
, enum machine_mode
*outer_mode
,
822 *outer_mode
= GET_MODE (reg
);
824 if (!get_biv_step_1 (last_def
[REGNO (reg
)], reg
,
825 inner_step
, inner_mode
, extend
, *outer_mode
,
829 if (*inner_mode
!= *outer_mode
830 && *extend
== UNKNOWN
)
833 if (*inner_mode
== *outer_mode
834 && *extend
!= UNKNOWN
)
837 if (*inner_mode
== *outer_mode
838 && *outer_step
!= const0_rtx
)
844 /* Determines whether DEF is a biv and if so, stores its description
848 iv_analyze_biv (rtx def
, struct rtx_iv
*iv
)
851 rtx inner_step
, outer_step
;
852 enum machine_mode inner_mode
, outer_mode
;
853 enum rtx_code extend
;
857 fprintf (dump_file
, "Analysing ");
858 print_rtl (dump_file
, def
);
859 fprintf (dump_file
, " for bivness.\n");
864 if (!CONSTANT_P (def
))
867 return iv_constant (iv
, def
, VOIDmode
);
871 if (last_def
[regno
] == const0_rtx
)
874 fprintf (dump_file
, " not simple.\n");
878 if (last_def
[regno
] && bivs
[regno
].analysed
)
881 fprintf (dump_file
, " already analysed.\n");
884 return iv
->base
!= NULL_RTX
;
887 if (!last_def
[regno
])
889 iv_constant (iv
, def
, VOIDmode
);
894 if (!get_biv_step (def
, &inner_step
, &inner_mode
, &extend
,
895 &outer_mode
, &outer_step
))
901 /* Loop transforms base to es (base + inner_step) + outer_step,
902 where es means extend of subreg between inner_mode and outer_mode.
903 The corresponding induction variable is
905 es ((base - outer_step) + i * (inner_step + outer_step)) + outer_step */
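  /* For instance, with inner_step = 1, outer_step = 0 and a sign extension
     from SImode to DImode (the ppc64-style update described in
     get_biv_step_1), this is sign_extend:DI (subreg:SI (base + i * 1)):
     below, base = def - 0, step = 1 + 0, delta = 0, mult = 1, and
     first_special is set because inner_mode != outer_mode.  */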
907 iv
->base
= simplify_gen_binary (MINUS
, outer_mode
, def
, outer_step
);
908 iv
->step
= simplify_gen_binary (PLUS
, outer_mode
, inner_step
, outer_step
);
909 iv
->mode
= inner_mode
;
910 iv
->extend_mode
= outer_mode
;
912 iv
->mult
= const1_rtx
;
913 iv
->delta
= outer_step
;
914 iv
->first_special
= inner_mode
!= outer_mode
;
919 fprintf (dump_file
, " ");
920 dump_iv_info (dump_file
, iv
);
921 fprintf (dump_file
, "\n");
926 return iv
->base
!= NULL_RTX
;
929 /* Analyzes operand OP of INSN and stores the result to *IV. */
932 iv_analyze_op (rtx insn
, rtx op
, struct rtx_iv
*iv
)
936 bool inv
= CONSTANT_P (op
);
940 fprintf (dump_file
, "Analysing operand ");
941 print_rtl (dump_file
, op
);
942 fprintf (dump_file
, " of insn ");
943 print_rtl_single (dump_file
, insn
);
946 if (GET_CODE (op
) == SUBREG
)
948 if (!subreg_lowpart_p (op
))
951 if (!iv_analyze_op (insn
, SUBREG_REG (op
), iv
))
954 return iv_subreg (iv
, GET_MODE (op
));
960 if (!last_def
[regno
])
962 else if (last_def
[regno
] == const0_rtx
)
965 fprintf (dump_file
, " not simple.\n");
972 iv_constant (iv
, op
, VOIDmode
);
976 fprintf (dump_file
, " ");
977 dump_iv_info (dump_file
, iv
);
978 fprintf (dump_file
, "\n");
983 def_insn
= iv_get_reaching_def (insn
, op
);
984 if (def_insn
== const0_rtx
)
987 fprintf (dump_file
, " not simple.\n");
991 return iv_analyze (def_insn
, op
, iv
);
994 /* Analyzes iv DEF defined in INSN and stores the result to *IV. */
997 iv_analyze (rtx insn
, rtx def
, struct rtx_iv
*iv
)
1000 rtx set
, rhs
, mby
= NULL_RTX
, tmp
;
1001 rtx op0
= NULL_RTX
, op1
= NULL_RTX
;
1002 struct rtx_iv iv0
, iv1
;
1003 enum machine_mode amode
;
1006 if (insn
== const0_rtx
)
1009 if (GET_CODE (def
) == SUBREG
)
1011 if (!subreg_lowpart_p (def
))
1014 if (!iv_analyze (insn
, SUBREG_REG (def
), iv
))
1017 return iv_subreg (iv
, GET_MODE (def
));
1021 return iv_analyze_biv (def
, iv
);
1025 fprintf (dump_file
, "Analysing def of ");
1026 print_rtl (dump_file
, def
);
1027 fprintf (dump_file
, " in insn ");
1028 print_rtl_single (dump_file
, insn
);
1031 uid
= INSN_UID (insn
);
1032 if (insn_info
[uid
].iv
.analysed
)
1035 fprintf (dump_file
, " already analysed.\n");
1036 *iv
= insn_info
[uid
].iv
;
1037 return iv
->base
!= NULL_RTX
;
1040 iv
->mode
= VOIDmode
;
1041 iv
->base
= NULL_RTX
;
1042 iv
->step
= NULL_RTX
;
1044 set
= single_set (insn
);
1045 rhs
= find_reg_equal_equiv_note (insn
);
1047 rhs
= XEXP (rhs
, 0);
1049 rhs
= SET_SRC (set
);
1050 code
= GET_CODE (rhs
);
1052 if (CONSTANT_P (rhs
))
1055 amode
= GET_MODE (def
);
1062 if (!subreg_lowpart_p (rhs
))
1074 op0
= XEXP (rhs
, 0);
1079 op0
= XEXP (rhs
, 0);
1080 op1
= XEXP (rhs
, 1);
1084 op0
= XEXP (rhs
, 0);
1085 mby
= XEXP (rhs
, 1);
1086 if (!CONSTANT_P (mby
))
1088 if (!CONSTANT_P (op0
))
1097 if (CONSTANT_P (XEXP (rhs
, 0)))
1099 op0
= XEXP (rhs
, 0);
1100 mby
= XEXP (rhs
, 1);
1107 amode
= GET_MODE (rhs
);
1112 if (!iv_analyze_op (insn
, op0
, &iv0
))
1115 if (iv0
.mode
== VOIDmode
)
1118 iv0
.extend_mode
= amode
;
1124 if (!iv_analyze_op (insn
, op1
, &iv1
))
1127 if (iv1
.mode
== VOIDmode
)
1130 iv1
.extend_mode
= amode
;
1138 if (!iv_extend (&iv0
, code
, amode
))
1149 if (!iv_add (&iv0
, &iv1
, code
))
1154 if (!iv_mult (&iv0
, mby
))
1159 if (!iv_shift (&iv0
, mby
))
1170 iv
->analysed
= true;
1171 insn_info
[uid
].iv
= *iv
;
1175 print_rtl (dump_file
, def
);
1176 fprintf (dump_file
, " in insn ");
1177 print_rtl_single (dump_file
, insn
);
1178 fprintf (dump_file
, " is ");
1179 dump_iv_info (dump_file
, iv
);
1180 fprintf (dump_file
, "\n");
1183 return iv
->base
!= NULL_RTX
;
1186 /* Checks whether definition of register REG in INSN a basic induction
1187 variable. IV analysis must have been initialized (via a call to
1188 iv_analysis_loop_init) for this function to produce a result. */
1191 biv_p (rtx insn
, rtx reg
)
1198 if (last_def
[REGNO (reg
)] != insn
)
1201 return iv_analyze_biv (reg
, &iv
);
1204 /* Calculates value of IV at ITERATION-th iteration. */
1207 get_iv_value (struct rtx_iv
*iv
, rtx iteration
)
1211 /* We would need to generate some if_then_else patterns, and so far
1212 it is not needed anywhere. */
1213 if (iv
->first_special
)
1216 if (iv
->step
!= const0_rtx
&& iteration
!= const0_rtx
)
1217 val
= simplify_gen_binary (PLUS
, iv
->extend_mode
, iv
->base
,
1218 simplify_gen_binary (MULT
, iv
->extend_mode
,
1219 iv
->step
, iteration
));
1223 if (iv
->extend_mode
== iv
->mode
)
1226 val
= lowpart_subreg (iv
->mode
, val
, iv
->extend_mode
);
1228 if (iv
->extend
== UNKNOWN
)
1231 val
= simplify_gen_unary (iv
->extend
, iv
->extend_mode
, val
, iv
->mode
);
1232 val
= simplify_gen_binary (PLUS
, iv
->extend_mode
, iv
->delta
,
1233 simplify_gen_binary (MULT
, iv
->extend_mode
,
1239 /* Free the data for an induction variable analysis. */
1242 iv_analysis_done (void)
/* Computes inverse to X modulo (1 << MOD).  */

static unsigned HOST_WIDEST_INT
inverse (unsigned HOST_WIDEST_INT x, int mod)
{
  unsigned HOST_WIDEST_INT mask =
	  ((unsigned HOST_WIDEST_INT) 1 << (mod - 1) << 1) - 1;
  unsigned HOST_WIDEST_INT rslt = 1;
  int i;

  for (i = 0; i < mod - 1; i++)
    {
      rslt = (rslt * x) & mask;
      x = (x * x) & mask;
    }

  return rslt;
}
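/* For example, inverse (3, 4) yields 11, the inverse of 3 modulo 16:
   3 * 11 = 33 == 1 (mod 16).  iv_number_of_iterations uses this below to
   divide by the (odd part of the) step when counting iterations modulo
   2^(size of mode).  */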
1282 /* Tries to estimate the maximum number of iterations. */
1284 static unsigned HOST_WIDEST_INT
1285 determine_max_iter (struct niter_desc
*desc
)
1287 rtx niter
= desc
->niter_expr
;
1288 rtx mmin
, mmax
, left
, right
;
1289 unsigned HOST_WIDEST_INT nmax
, inc
;
1291 if (GET_CODE (niter
) == AND
1292 && GET_CODE (XEXP (niter
, 0)) == CONST_INT
)
1294 nmax
= INTVAL (XEXP (niter
, 0));
1295 if (!(nmax
& (nmax
+ 1)))
1297 desc
->niter_max
= nmax
;
1302 get_mode_bounds (desc
->mode
, desc
->signed_p
, desc
->mode
, &mmin
, &mmax
);
1303 nmax
= INTVAL (mmax
) - INTVAL (mmin
);
1305 if (GET_CODE (niter
) == UDIV
)
1307 if (GET_CODE (XEXP (niter
, 1)) != CONST_INT
)
1309 desc
->niter_max
= nmax
;
1312 inc
= INTVAL (XEXP (niter
, 1));
1313 niter
= XEXP (niter
, 0);
1318 if (GET_CODE (niter
) == PLUS
)
1320 left
= XEXP (niter
, 0);
      right = XEXP (niter, 1);
1323 if (GET_CODE (right
) == CONST_INT
)
1324 right
= GEN_INT (-INTVAL (right
));
1326 else if (GET_CODE (niter
) == MINUS
)
1328 left
= XEXP (niter
, 0);
      right = XEXP (niter, 1);
1337 if (GET_CODE (left
) == CONST_INT
)
1339 if (GET_CODE (right
) == CONST_INT
)
1341 nmax
= INTVAL (mmax
) - INTVAL (mmin
);
1343 desc
->niter_max
= nmax
/ inc
;
1347 /* Checks whether register *REG is in set ALT. Callback for for_each_rtx. */
1350 altered_reg_used (rtx
*reg
, void *alt
)
1355 return REGNO_REG_SET_P (alt
, REGNO (*reg
));
1358 /* Marks registers altered by EXPR in set ALT. */
1361 mark_altered (rtx expr
, rtx by ATTRIBUTE_UNUSED
, void *alt
)
1363 if (GET_CODE (expr
) == SUBREG
)
1364 expr
= SUBREG_REG (expr
);
1368 SET_REGNO_REG_SET (alt
, REGNO (expr
));
1371 /* Checks whether RHS is simple enough to process. */
1374 simple_rhs_p (rtx rhs
)
1378 if (CONSTANT_P (rhs
)
1382 switch (GET_CODE (rhs
))
1386 op0
= XEXP (rhs
, 0);
1387 op1
= XEXP (rhs
, 1);
1388 /* Allow reg + const sets only. */
1389 if (REG_P (op0
) && CONSTANT_P (op1
))
1391 if (REG_P (op1
) && CONSTANT_P (op0
))
1401 /* Simplifies *EXPR using assignment in INSN. ALTERED is the set of registers
1405 simplify_using_assignment (rtx insn
, rtx
*expr
, regset altered
)
1407 rtx set
= single_set (insn
);
1408 rtx lhs
= NULL_RTX
, rhs
;
1413 lhs
= SET_DEST (set
);
1415 || altered_reg_used (&lhs
, altered
))
1421 note_stores (PATTERN (insn
), mark_altered
, altered
);
1426 /* Kill all call clobbered registers. */
1427 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1428 if (TEST_HARD_REG_BIT (regs_invalidated_by_call
, i
))
1429 SET_REGNO_REG_SET (altered
, i
);
1435 rhs
= find_reg_equal_equiv_note (insn
);
1437 rhs
= XEXP (rhs
, 0);
1439 rhs
= SET_SRC (set
);
1441 if (!simple_rhs_p (rhs
))
1444 if (for_each_rtx (&rhs
, altered_reg_used
, altered
))
1447 *expr
= simplify_replace_rtx (*expr
, lhs
, rhs
);
1450 /* Checks whether A implies B. */
1453 implies_p (rtx a
, rtx b
)
1455 rtx op0
, op1
, opb0
, opb1
, r
;
1456 enum machine_mode mode
;
1458 if (GET_CODE (a
) == EQ
)
1465 r
= simplify_replace_rtx (b
, op0
, op1
);
1466 if (r
== const_true_rtx
)
1472 r
= simplify_replace_rtx (b
, op1
, op0
);
1473 if (r
== const_true_rtx
)
1478 /* A < B implies A + 1 <= B. */
1479 if ((GET_CODE (a
) == GT
|| GET_CODE (a
) == LT
)
1480 && (GET_CODE (b
) == GE
|| GET_CODE (b
) == LE
))
1487 if (GET_CODE (a
) == GT
)
1494 if (GET_CODE (b
) == GE
)
1501 mode
= GET_MODE (op0
);
1502 if (mode
!= GET_MODE (opb0
))
1504 else if (mode
== VOIDmode
)
1506 mode
= GET_MODE (op1
);
1507 if (mode
!= GET_MODE (opb1
))
1511 if (mode
!= VOIDmode
1512 && rtx_equal_p (op1
, opb1
)
1513 && simplify_gen_binary (MINUS
, mode
, opb0
, op0
) == const1_rtx
)
1520 /* Canonicalizes COND so that
1522 (1) Ensure that operands are ordered according to
1523 swap_commutative_operands_p.
1524 (2) (LE x const) will be replaced with (LT x <const+1>) and similarly
1525 for GE, GEU, and LEU. */
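/* For example, (le (reg:SI 60) (const_int 4)) becomes
   (lt (reg:SI 60) (const_int 5)) and (geu (reg:SI 60) (const_int 4)) becomes
   (gtu (reg:SI 60) (const_int 3)), so later code only has to reason about
   the strict forms of the comparisons.  */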
1528 canon_condition (rtx cond
)
1533 enum machine_mode mode
;
1535 code
= GET_CODE (cond
);
1536 op0
= XEXP (cond
, 0);
1537 op1
= XEXP (cond
, 1);
1539 if (swap_commutative_operands_p (op0
, op1
))
1541 code
= swap_condition (code
);
1547 mode
= GET_MODE (op0
);
1548 if (mode
== VOIDmode
)
1549 mode
= GET_MODE (op1
);
1550 if (mode
== VOIDmode
)
1553 if (GET_CODE (op1
) == CONST_INT
1554 && GET_MODE_CLASS (mode
) != MODE_CC
1555 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
)
1557 HOST_WIDE_INT const_val
= INTVAL (op1
);
1558 unsigned HOST_WIDE_INT uconst_val
= const_val
;
1559 unsigned HOST_WIDE_INT max_val
1560 = (unsigned HOST_WIDE_INT
) GET_MODE_MASK (mode
);
1565 if ((unsigned HOST_WIDE_INT
) const_val
!= max_val
>> 1)
1566 code
= LT
, op1
= gen_int_mode (const_val
+ 1, GET_MODE (op0
));
1569 /* When cross-compiling, const_val might be sign-extended from
1570 BITS_PER_WORD to HOST_BITS_PER_WIDE_INT */
1572 if ((HOST_WIDE_INT
) (const_val
& max_val
)
1573 != (((HOST_WIDE_INT
) 1
1574 << (GET_MODE_BITSIZE (GET_MODE (op0
)) - 1))))
1575 code
= GT
, op1
= gen_int_mode (const_val
- 1, mode
);
1579 if (uconst_val
< max_val
)
1580 code
= LTU
, op1
= gen_int_mode (uconst_val
+ 1, mode
);
1584 if (uconst_val
!= 0)
1585 code
= GTU
, op1
= gen_int_mode (uconst_val
- 1, mode
);
1593 if (op0
!= XEXP (cond
, 0)
1594 || op1
!= XEXP (cond
, 1)
1595 || code
!= GET_CODE (cond
)
1596 || GET_MODE (cond
) != SImode
)
1597 cond
= gen_rtx_fmt_ee (code
, SImode
, op0
, op1
);
1602 /* Tries to use the fact that COND holds to simplify EXPR. ALTERED is the
1603 set of altered regs. */
1606 simplify_using_condition (rtx cond
, rtx
*expr
, regset altered
)
1608 rtx rev
, reve
, exp
= *expr
;
1610 if (!COMPARISON_P (exp
))
1613 /* If some register gets altered later, we do not really speak about its
1614 value at the time of comparison. */
1616 && for_each_rtx (&cond
, altered_reg_used
, altered
))
1619 rev
= reversed_condition (cond
);
1620 reve
= reversed_condition (exp
);
1622 cond
= canon_condition (cond
);
1623 exp
= canon_condition (exp
);
1625 rev
= canon_condition (rev
);
1627 reve
= canon_condition (reve
);
1629 if (rtx_equal_p (exp
, cond
))
1631 *expr
= const_true_rtx
;
1636 if (rev
&& rtx_equal_p (exp
, rev
))
1642 if (implies_p (cond
, exp
))
1644 *expr
= const_true_rtx
;
1648 if (reve
&& implies_p (cond
, reve
))
1654 /* A proof by contradiction. If *EXPR implies (not cond), *EXPR must
1656 if (rev
&& implies_p (exp
, rev
))
1662 /* Similarly, If (not *EXPR) implies (not cond), *EXPR must be true. */
1663 if (rev
&& reve
&& implies_p (reve
, rev
))
1665 *expr
= const_true_rtx
;
1669 /* We would like to have some other tests here. TODO. */
1674 /* Use relationship between A and *B to eventually eliminate *B.
1675 OP is the operation we consider. */
1678 eliminate_implied_condition (enum rtx_code op
, rtx a
, rtx
*b
)
1682 /* If A implies *B, we may replace *B by true. */
1683 if (implies_p (a
, *b
))
1684 *b
= const_true_rtx
;
1688 /* If *B implies A, we may replace *B by false. */
1689 if (implies_p (*b
, a
))
1696 /* Eliminates the conditions in TAIL that are implied by HEAD. OP is the
1697 operation we consider. */
1700 eliminate_implied_conditions (enum rtx_code op
, rtx
*head
, rtx tail
)
1704 for (elt
= tail
; elt
; elt
= XEXP (elt
, 1))
1705 eliminate_implied_condition (op
, *head
, &XEXP (elt
, 0));
1706 for (elt
= tail
; elt
; elt
= XEXP (elt
, 1))
1707 eliminate_implied_condition (op
, XEXP (elt
, 0), head
);
1710 /* Simplifies *EXPR using initial values at the start of the LOOP. If *EXPR
1711 is a list, its elements are assumed to be combined using OP. */
1714 simplify_using_initial_values (struct loop
*loop
, enum rtx_code op
, rtx
*expr
)
1716 rtx head
, tail
, insn
;
1719 regset_head altered_head
;
1725 if (CONSTANT_P (*expr
))
1728 if (GET_CODE (*expr
) == EXPR_LIST
)
1730 head
= XEXP (*expr
, 0);
1731 tail
= XEXP (*expr
, 1);
1733 eliminate_implied_conditions (op
, &head
, tail
);
1737 neutral
= const_true_rtx
;
1742 neutral
= const0_rtx
;
1743 aggr
= const_true_rtx
;
1748 simplify_using_initial_values (loop
, UNKNOWN
, &head
);
1751 XEXP (*expr
, 0) = aggr
;
1752 XEXP (*expr
, 1) = NULL_RTX
;
1755 else if (head
== neutral
)
1758 simplify_using_initial_values (loop
, op
, expr
);
1761 simplify_using_initial_values (loop
, op
, &tail
);
1763 if (tail
&& XEXP (tail
, 0) == aggr
)
1769 XEXP (*expr
, 0) = head
;
1770 XEXP (*expr
, 1) = tail
;
1777 e
= loop_preheader_edge (loop
);
1778 if (e
->src
== ENTRY_BLOCK_PTR
)
1781 altered
= INITIALIZE_REG_SET (altered_head
);
1787 insn
= BB_END (e
->src
);
1788 if (any_condjump_p (insn
))
1790 rtx cond
= get_condition (BB_END (e
->src
), NULL
, false, true);
1792 if (cond
&& (e
->flags
& EDGE_FALLTHRU
))
1793 cond
= reversed_condition (cond
);
1796 simplify_using_condition (cond
, expr
, altered
);
1797 if (CONSTANT_P (*expr
))
1799 FREE_REG_SET (altered
);
1805 FOR_BB_INSNS_REVERSE (e
->src
, insn
)
1810 simplify_using_assignment (insn
, expr
, altered
);
1811 if (CONSTANT_P (*expr
))
1813 FREE_REG_SET (altered
);
1818 /* This is a bit subtle. Store away e->src in tmp_bb, since we
1819 modify `e' and this can invalidate the subsequent count of
1820 e->src's predecessors by looking at the wrong block. */
1822 e
= EDGE_PRED (tmp_bb
, 0);
1823 if (EDGE_COUNT (tmp_bb
->preds
) > 1
1824 || e
->src
== ENTRY_BLOCK_PTR
)
1828 FREE_REG_SET (altered
);
1831 /* Transforms invariant IV into MODE. Adds assumptions based on the fact
1832 that IV occurs as left operands of comparison COND and its signedness
1833 is SIGNED_P to DESC. */
1836 shorten_into_mode (struct rtx_iv
*iv
, enum machine_mode mode
,
1837 enum rtx_code cond
, bool signed_p
, struct niter_desc
*desc
)
1839 rtx mmin
, mmax
, cond_over
, cond_under
;
1841 get_mode_bounds (mode
, signed_p
, iv
->extend_mode
, &mmin
, &mmax
);
1842 cond_under
= simplify_gen_relational (LT
, SImode
, iv
->extend_mode
,
1844 cond_over
= simplify_gen_relational (GT
, SImode
, iv
->extend_mode
,
1853 if (cond_under
!= const0_rtx
)
1855 alloc_EXPR_LIST (0, cond_under
, desc
->infinite
);
1856 if (cond_over
!= const0_rtx
)
1857 desc
->noloop_assumptions
=
1858 alloc_EXPR_LIST (0, cond_over
, desc
->noloop_assumptions
);
1865 if (cond_over
!= const0_rtx
)
1867 alloc_EXPR_LIST (0, cond_over
, desc
->infinite
);
1868 if (cond_under
!= const0_rtx
)
1869 desc
->noloop_assumptions
=
1870 alloc_EXPR_LIST (0, cond_under
, desc
->noloop_assumptions
);
1874 if (cond_over
!= const0_rtx
)
1876 alloc_EXPR_LIST (0, cond_over
, desc
->infinite
);
1877 if (cond_under
!= const0_rtx
)
1879 alloc_EXPR_LIST (0, cond_under
, desc
->infinite
);
1887 iv
->extend
= signed_p
? SIGN_EXTEND
: ZERO_EXTEND
;
1890 /* Transforms IV0 and IV1 compared by COND so that they are both compared as
1891 subregs of the same mode if possible (sometimes it is necessary to add
1892 some assumptions to DESC). */
1895 canonicalize_iv_subregs (struct rtx_iv
*iv0
, struct rtx_iv
*iv1
,
1896 enum rtx_code cond
, struct niter_desc
*desc
)
1898 enum machine_mode comp_mode
;
1901 /* If the ivs behave specially in the first iteration, or are
1902 added/multiplied after extending, we ignore them. */
1903 if (iv0
->first_special
|| iv0
->mult
!= const1_rtx
|| iv0
->delta
!= const0_rtx
)
1905 if (iv1
->first_special
|| iv1
->mult
!= const1_rtx
|| iv1
->delta
!= const0_rtx
)
1908 /* If there is some extend, it must match signedness of the comparison. */
1913 if (iv0
->extend
== ZERO_EXTEND
1914 || iv1
->extend
== ZERO_EXTEND
)
1921 if (iv0
->extend
== SIGN_EXTEND
1922 || iv1
->extend
== SIGN_EXTEND
)
1928 if (iv0
->extend
!= UNKNOWN
1929 && iv1
->extend
!= UNKNOWN
1930 && iv0
->extend
!= iv1
->extend
)
1934 if (iv0
->extend
!= UNKNOWN
)
1935 signed_p
= iv0
->extend
== SIGN_EXTEND
;
1936 if (iv1
->extend
!= UNKNOWN
)
1937 signed_p
= iv1
->extend
== SIGN_EXTEND
;
1944 /* Values of both variables should be computed in the same mode. These
1945 might indeed be different, if we have comparison like
1947 (compare (subreg:SI (iv0)) (subreg:SI (iv1)))
1949 and iv0 and iv1 are both ivs iterating in SI mode, but calculated
1950 in different modes. This does not seem impossible to handle, but
1951 it hardly ever occurs in practice.
1953 The only exception is the case when one of operands is invariant.
1954 For example pentium 3 generates comparisons like
1955 (lt (subreg:HI (reg:SI)) 100). Here we assign HImode to 100, but we
1956 definitely do not want this prevent the optimization. */
1957 comp_mode
= iv0
->extend_mode
;
1958 if (GET_MODE_BITSIZE (comp_mode
) < GET_MODE_BITSIZE (iv1
->extend_mode
))
1959 comp_mode
= iv1
->extend_mode
;
1961 if (iv0
->extend_mode
!= comp_mode
)
1963 if (iv0
->mode
!= iv0
->extend_mode
1964 || iv0
->step
!= const0_rtx
)
1967 iv0
->base
= simplify_gen_unary (signed_p
? SIGN_EXTEND
: ZERO_EXTEND
,
1968 comp_mode
, iv0
->base
, iv0
->mode
);
1969 iv0
->extend_mode
= comp_mode
;
1972 if (iv1
->extend_mode
!= comp_mode
)
1974 if (iv1
->mode
!= iv1
->extend_mode
1975 || iv1
->step
!= const0_rtx
)
1978 iv1
->base
= simplify_gen_unary (signed_p
? SIGN_EXTEND
: ZERO_EXTEND
,
1979 comp_mode
, iv1
->base
, iv1
->mode
);
1980 iv1
->extend_mode
= comp_mode
;
1983 /* Check that both ivs belong to a range of a single mode. If one of the
1984 operands is an invariant, we may need to shorten it into the common
1986 if (iv0
->mode
== iv0
->extend_mode
1987 && iv0
->step
== const0_rtx
1988 && iv0
->mode
!= iv1
->mode
)
1989 shorten_into_mode (iv0
, iv1
->mode
, cond
, signed_p
, desc
);
1991 if (iv1
->mode
== iv1
->extend_mode
1992 && iv1
->step
== const0_rtx
1993 && iv0
->mode
!= iv1
->mode
)
1994 shorten_into_mode (iv1
, iv0
->mode
, swap_condition (cond
), signed_p
, desc
);
1996 if (iv0
->mode
!= iv1
->mode
)
1999 desc
->mode
= iv0
->mode
;
2000 desc
->signed_p
= signed_p
;
2005 /* Computes number of iterations of the CONDITION in INSN in LOOP and stores
2006 the result into DESC. Very similar to determine_number_of_iterations
2007 (basically its rtl version), complicated by things like subregs. */
2010 iv_number_of_iterations (struct loop
*loop
, rtx insn
, rtx condition
,
2011 struct niter_desc
*desc
)
2013 rtx op0
, op1
, delta
, step
, bound
, may_xform
, def_insn
, tmp
, tmp0
, tmp1
;
2014 struct rtx_iv iv0
, iv1
, tmp_iv
;
2015 rtx assumption
, may_not_xform
;
2017 enum machine_mode mode
, comp_mode
;
2018 rtx mmin
, mmax
, mode_mmin
, mode_mmax
;
2019 unsigned HOST_WIDEST_INT s
, size
, d
, inv
;
2020 HOST_WIDEST_INT up
, down
, inc
;
2021 int was_sharp
= false;
  /* The meaning of these assumptions is this:
     if !assumptions
       then the rest of information does not have to be valid
     if noloop_assumptions then the loop does not roll
     if infinite then this exit is never used */
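  /* For instance, for a "while (i < n)" exit with unknown n, the entry
     condition n <= i ends up in noloop_assumptions below: under it the exit
     is taken already in the first iteration, so the loop does not roll.  */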
2030 desc
->assumptions
= NULL_RTX
;
2031 desc
->noloop_assumptions
= NULL_RTX
;
2032 desc
->infinite
= NULL_RTX
;
2033 desc
->simple_p
= true;
2035 desc
->const_iter
= false;
2036 desc
->niter_expr
= NULL_RTX
;
2037 desc
->niter_max
= 0;
2039 cond
= GET_CODE (condition
);
2040 if (!COMPARISON_P (condition
))
2043 mode
= GET_MODE (XEXP (condition
, 0));
2044 if (mode
== VOIDmode
)
2045 mode
= GET_MODE (XEXP (condition
, 1));
2046 /* The constant comparisons should be folded. */
2047 if (mode
== VOIDmode
)
2050 /* We only handle integers or pointers. */
2051 if (GET_MODE_CLASS (mode
) != MODE_INT
2052 && GET_MODE_CLASS (mode
) != MODE_PARTIAL_INT
)
2055 op0
= XEXP (condition
, 0);
2056 def_insn
= iv_get_reaching_def (insn
, op0
);
2057 if (!iv_analyze (def_insn
, op0
, &iv0
))
2059 if (iv0
.extend_mode
== VOIDmode
)
2060 iv0
.mode
= iv0
.extend_mode
= mode
;
2062 op1
= XEXP (condition
, 1);
2063 def_insn
= iv_get_reaching_def (insn
, op1
);
2064 if (!iv_analyze (def_insn
, op1
, &iv1
))
2066 if (iv1
.extend_mode
== VOIDmode
)
2067 iv1
.mode
= iv1
.extend_mode
= mode
;
2069 if (GET_MODE_BITSIZE (iv0
.extend_mode
) > HOST_BITS_PER_WIDE_INT
2070 || GET_MODE_BITSIZE (iv1
.extend_mode
) > HOST_BITS_PER_WIDE_INT
)
2073 /* Check condition and normalize it. */
2081 tmp_iv
= iv0
; iv0
= iv1
; iv1
= tmp_iv
;
2082 cond
= swap_condition (cond
);
2094 /* Handle extends. This is relatively nontrivial, so we only try in some
2095 easy cases, when we can canonicalize the ivs (possibly by adding some
2096 assumptions) to shape subreg (base + i * step). This function also fills
2097 in desc->mode and desc->signed_p. */
2099 if (!canonicalize_iv_subregs (&iv0
, &iv1
, cond
, desc
))
2102 comp_mode
= iv0
.extend_mode
;
2104 size
= GET_MODE_BITSIZE (mode
);
2105 get_mode_bounds (mode
, (cond
== LE
|| cond
== LT
), comp_mode
, &mmin
, &mmax
);
2106 mode_mmin
= lowpart_subreg (mode
, mmin
, comp_mode
);
2107 mode_mmax
= lowpart_subreg (mode
, mmax
, comp_mode
);
2109 if (GET_CODE (iv0
.step
) != CONST_INT
|| GET_CODE (iv1
.step
) != CONST_INT
)
2112 /* We can take care of the case of two induction variables chasing each other
2113 if the test is NE. I have never seen a loop using it, but still it is
2115 if (iv0
.step
!= const0_rtx
&& iv1
.step
!= const0_rtx
)
2120 iv0
.step
= simplify_gen_binary (MINUS
, comp_mode
, iv0
.step
, iv1
.step
);
2121 iv1
.step
= const0_rtx
;
2124 /* This is either infinite loop or the one that ends immediately, depending
2125 on initial values. Unswitching should remove this kind of conditions. */
2126 if (iv0
.step
== const0_rtx
&& iv1
.step
== const0_rtx
)
2129 /* Ignore loops of while (i-- < 10) type. */
2131 && (INTVAL (iv0
.step
) < 0 || INTVAL (iv1
.step
) > 0))
2134 /* Some more condition normalization. We must record some assumptions
2135 due to overflows. */
2140 /* We want to take care only of non-sharp relationals; this is easy,
2141 as in cases the overflow would make the transformation unsafe
2142 the loop does not roll. Seemingly it would make more sense to want
2143 to take care of sharp relationals instead, as NE is more similar to
2144 them, but the problem is that here the transformation would be more
2145 difficult due to possibly infinite loops. */
2146 if (iv0
.step
== const0_rtx
)
2148 tmp
= lowpart_subreg (mode
, iv0
.base
, comp_mode
);
2149 assumption
= simplify_gen_relational (EQ
, SImode
, mode
, tmp
,
2151 if (assumption
== const_true_rtx
)
2153 iv0
.base
= simplify_gen_binary (PLUS
, comp_mode
,
2154 iv0
.base
, const1_rtx
);
2158 tmp
= lowpart_subreg (mode
, iv1
.base
, comp_mode
);
2159 assumption
= simplify_gen_relational (EQ
, SImode
, mode
, tmp
,
2161 if (assumption
== const_true_rtx
)
2163 iv1
.base
= simplify_gen_binary (PLUS
, comp_mode
,
2164 iv1
.base
, constm1_rtx
);
2167 if (assumption
!= const0_rtx
)
2168 desc
->noloop_assumptions
=
2169 alloc_EXPR_LIST (0, assumption
, desc
->noloop_assumptions
);
2170 cond
= (cond
== LT
) ? LE
: LEU
;
2172 /* It will be useful to be able to tell the difference once more in
2173 LE -> NE reduction. */
2179 /* Take care of trivially infinite loops. */
2182 if (iv0
.step
== const0_rtx
)
2184 tmp
= lowpart_subreg (mode
, iv0
.base
, comp_mode
);
2185 if (rtx_equal_p (tmp
, mode_mmin
))
2188 alloc_EXPR_LIST (0, const_true_rtx
, NULL_RTX
);
2194 tmp
= lowpart_subreg (mode
, iv1
.base
, comp_mode
);
2195 if (rtx_equal_p (tmp
, mode_mmax
))
2198 alloc_EXPR_LIST (0, const_true_rtx
, NULL_RTX
);
2204 /* If we can we want to take care of NE conditions instead of size
2205 comparisons, as they are much more friendly (most importantly
2206 this takes care of special handling of loops with step 1). We can
2207 do it if we first check that upper bound is greater or equal to
2208 lower bound, their difference is constant c modulo step and that
2209 there is not an overflow. */
2212 if (iv0
.step
== const0_rtx
)
2213 step
= simplify_gen_unary (NEG
, comp_mode
, iv1
.step
, comp_mode
);
2216 delta
= simplify_gen_binary (MINUS
, comp_mode
, iv1
.base
, iv0
.base
);
2217 delta
= lowpart_subreg (mode
, delta
, comp_mode
);
2218 delta
= simplify_gen_binary (UMOD
, mode
, delta
, step
);
2219 may_xform
= const0_rtx
;
2220 may_not_xform
= const_true_rtx
;
2222 if (GET_CODE (delta
) == CONST_INT
)
2224 if (was_sharp
&& INTVAL (delta
) == INTVAL (step
) - 1)
	      /* A special case.  We have transformed condition of type
		   for (i = 0; i < 4; i += 4)
		 into
		   for (i = 0; i <= 3; i += 4)
		 obviously if the test for overflow during that transformation
		 passed, we cannot overflow here.  Most importantly any
		 loop with sharp end condition and step 1 falls into this
		 category, so handling this case specially is definitely
		 worth the troubles.  */
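	      /* With step 1 this is the common "for (i = a; i < b; i++)"
		 shape: after the LT -> LE rewrite above, delta is
		 (b - 1 - a) umod 1 == 0 and step - 1 == 0, so the
		 transformation to NE is always safe here.  */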
2235 may_xform
= const_true_rtx
;
2237 else if (iv0
.step
== const0_rtx
)
2239 bound
= simplify_gen_binary (PLUS
, comp_mode
, mmin
, step
);
2240 bound
= simplify_gen_binary (MINUS
, comp_mode
, bound
, delta
);
2241 bound
= lowpart_subreg (mode
, bound
, comp_mode
);
2242 tmp
= lowpart_subreg (mode
, iv0
.base
, comp_mode
);
2243 may_xform
= simplify_gen_relational (cond
, SImode
, mode
,
2245 may_not_xform
= simplify_gen_relational (reverse_condition (cond
),
2251 bound
= simplify_gen_binary (MINUS
, comp_mode
, mmax
, step
);
2252 bound
= simplify_gen_binary (PLUS
, comp_mode
, bound
, delta
);
2253 bound
= lowpart_subreg (mode
, bound
, comp_mode
);
2254 tmp
= lowpart_subreg (mode
, iv1
.base
, comp_mode
);
2255 may_xform
= simplify_gen_relational (cond
, SImode
, mode
,
2257 may_not_xform
= simplify_gen_relational (reverse_condition (cond
),
2263 if (may_xform
!= const0_rtx
)
2265 /* We perform the transformation always provided that it is not
2266 completely senseless. This is OK, as we would need this assumption
2267 to determine the number of iterations anyway. */
2268 if (may_xform
!= const_true_rtx
)
2270 /* If the step is a power of two and the final value we have
2271 computed overflows, the cycle is infinite. Otherwise it
2272 is nontrivial to compute the number of iterations. */
2274 if ((s
& (s
- 1)) == 0)
2275 desc
->infinite
= alloc_EXPR_LIST (0, may_not_xform
,
2278 desc
->assumptions
= alloc_EXPR_LIST (0, may_xform
,
2282 /* We are going to lose some information about upper bound on
2283 number of iterations in this step, so record the information
2285 inc
= INTVAL (iv0
.step
) - INTVAL (iv1
.step
);
2286 if (GET_CODE (iv1
.base
) == CONST_INT
)
2287 up
= INTVAL (iv1
.base
);
2289 up
= INTVAL (mode_mmax
) - inc
;
2290 down
= INTVAL (GET_CODE (iv0
.base
) == CONST_INT
2293 desc
->niter_max
= (up
- down
) / inc
+ 1;
2295 if (iv0
.step
== const0_rtx
)
2297 iv0
.base
= simplify_gen_binary (PLUS
, comp_mode
, iv0
.base
, delta
);
2298 iv0
.base
= simplify_gen_binary (MINUS
, comp_mode
, iv0
.base
, step
);
2302 iv1
.base
= simplify_gen_binary (MINUS
, comp_mode
, iv1
.base
, delta
);
2303 iv1
.base
= simplify_gen_binary (PLUS
, comp_mode
, iv1
.base
, step
);
2306 tmp0
= lowpart_subreg (mode
, iv0
.base
, comp_mode
);
2307 tmp1
= lowpart_subreg (mode
, iv1
.base
, comp_mode
);
2308 assumption
= simplify_gen_relational (reverse_condition (cond
),
2309 SImode
, mode
, tmp0
, tmp1
);
2310 if (assumption
== const_true_rtx
)
2312 else if (assumption
!= const0_rtx
)
2313 desc
->noloop_assumptions
=
2314 alloc_EXPR_LIST (0, assumption
, desc
->noloop_assumptions
);
2319 /* Count the number of iterations. */
2322 /* Everything we do here is just arithmetics modulo size of mode. This
2323 makes us able to do more involved computations of number of iterations
2324 than in other cases. First transform the condition into shape
2325 s * i <> c, with s positive. */
2326 iv1
.base
= simplify_gen_binary (MINUS
, comp_mode
, iv1
.base
, iv0
.base
);
2327 iv0
.base
= const0_rtx
;
2328 iv0
.step
= simplify_gen_binary (MINUS
, comp_mode
, iv0
.step
, iv1
.step
);
2329 iv1
.step
= const0_rtx
;
2330 if (INTVAL (iv0
.step
) < 0)
2332 iv0
.step
= simplify_gen_unary (NEG
, comp_mode
, iv0
.step
, mode
);
2333 iv1
.base
= simplify_gen_unary (NEG
, comp_mode
, iv1
.base
, mode
);
2335 iv0
.step
= lowpart_subreg (mode
, iv0
.step
, comp_mode
);
2337 /* Let nsd (s, size of mode) = d. If d does not divide c, the loop
2338 is infinite. Otherwise, the number of iterations is
2339 (inverse(s/d) * (c/d)) mod (size of mode/d). */
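  /* For example, in SImode with s = 4 and c = 40: d = 4 (the largest power
     of two dividing the step), 4 divides 40, and the loop runs
     inverse (1) * 10 = 10 iterations (mod 2^30).  With c = 42 instead, the
     "c umod d != 0" assumption recorded just below lands in desc->infinite:
     4 * i never reaches 42 when i wraps modulo 2^32.  */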
2340 s
= INTVAL (iv0
.step
); d
= 1;
2347 bound
= GEN_INT (((unsigned HOST_WIDEST_INT
) 1 << (size
- 1 ) << 1) - 1);
2349 tmp1
= lowpart_subreg (mode
, iv1
.base
, comp_mode
);
2350 tmp
= simplify_gen_binary (UMOD
, mode
, tmp1
, GEN_INT (d
));
2351 assumption
= simplify_gen_relational (NE
, SImode
, mode
, tmp
, const0_rtx
);
2352 desc
->infinite
= alloc_EXPR_LIST (0, assumption
, desc
->infinite
);
2354 tmp
= simplify_gen_binary (UDIV
, mode
, tmp1
, GEN_INT (d
));
2355 inv
= inverse (s
, size
);
2356 inv
= trunc_int_for_mode (inv
, mode
);
2357 tmp
= simplify_gen_binary (MULT
, mode
, tmp
, GEN_INT (inv
));
2358 desc
->niter_expr
= simplify_gen_binary (AND
, mode
, tmp
, bound
);
2362 if (iv1
.step
== const0_rtx
)
2363 /* Condition in shape a + s * i <= b
2364 We must know that b + s does not overflow and a <= b + s and then we
2365 can compute number of iterations as (b + s - a) / s. (It might
2366 seem that we in fact could be more clever about testing the b + s
2367 overflow condition using some information about b - a mod s,
2368 but it was already taken into account during LE -> NE transform). */
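	  /* E.g. for "for (i = 0; i <= 9; i++)": a = 0, s = 1, b = 9; provided
	     b + s = 10 does not wrap and a <= b + s holds, the number of
	     iterations is (b + s - a) / s = 10.  */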
2371 tmp0
= lowpart_subreg (mode
, iv0
.base
, comp_mode
);
2372 tmp1
= lowpart_subreg (mode
, iv1
.base
, comp_mode
);
2374 bound
= simplify_gen_binary (MINUS
, mode
, mode_mmax
,
2375 lowpart_subreg (mode
, step
, comp_mode
));
2376 assumption
= simplify_gen_relational (cond
, SImode
, mode
,
2379 alloc_EXPR_LIST (0, assumption
, desc
->assumptions
);
2381 tmp
= simplify_gen_binary (PLUS
, comp_mode
, iv1
.base
, iv0
.step
);
2382 tmp
= lowpart_subreg (mode
, tmp
, comp_mode
);
2383 assumption
= simplify_gen_relational (reverse_condition (cond
),
2384 SImode
, mode
, tmp0
, tmp
);
2386 delta
= simplify_gen_binary (PLUS
, mode
, tmp1
, step
);
2387 delta
= simplify_gen_binary (MINUS
, mode
, delta
, tmp0
);
2391 /* Condition in shape a <= b - s * i
2392 We must know that a - s does not overflow and a - s <= b and then
2393 we can again compute number of iterations as (b - (a - s)) / s. */
2394 step
= simplify_gen_unary (NEG
, mode
, iv1
.step
, mode
);
2395 tmp0
= lowpart_subreg (mode
, iv0
.base
, comp_mode
);
2396 tmp1
= lowpart_subreg (mode
, iv1
.base
, comp_mode
);
2398 bound
= simplify_gen_binary (MINUS
, mode
, mode_mmin
,
2399 lowpart_subreg (mode
, step
, comp_mode
));
2400 assumption
= simplify_gen_relational (cond
, SImode
, mode
,
2403 alloc_EXPR_LIST (0, assumption
, desc
->assumptions
);
2405 tmp
= simplify_gen_binary (PLUS
, comp_mode
, iv0
.base
, iv1
.step
);
2406 tmp
= lowpart_subreg (mode
, tmp
, comp_mode
);
2407 assumption
= simplify_gen_relational (reverse_condition (cond
),
2410 delta
= simplify_gen_binary (MINUS
, mode
, tmp0
, step
);
2411 delta
= simplify_gen_binary (MINUS
, mode
, tmp1
, delta
);
2413 if (assumption
== const_true_rtx
)
2415 else if (assumption
!= const0_rtx
)
2416 desc
->noloop_assumptions
=
2417 alloc_EXPR_LIST (0, assumption
, desc
->noloop_assumptions
);
2418 delta
= simplify_gen_binary (UDIV
, mode
, delta
, step
);
2419 desc
->niter_expr
= delta
;
2422 old_niter
= desc
->niter_expr
;
2424 simplify_using_initial_values (loop
, AND
, &desc
->assumptions
);
2425 if (desc
->assumptions
2426 && XEXP (desc
->assumptions
, 0) == const0_rtx
)
2428 simplify_using_initial_values (loop
, IOR
, &desc
->noloop_assumptions
);
2429 simplify_using_initial_values (loop
, IOR
, &desc
->infinite
);
2430 simplify_using_initial_values (loop
, UNKNOWN
, &desc
->niter_expr
);
2432 /* Rerun the simplification. Consider code (created by copying loop headers)
2444 The first pass determines that i = 0, the second pass uses it to eliminate
2445 noloop assumption. */
2447 simplify_using_initial_values (loop
, AND
, &desc
->assumptions
);
2448 if (desc
->assumptions
2449 && XEXP (desc
->assumptions
, 0) == const0_rtx
)
2451 simplify_using_initial_values (loop
, IOR
, &desc
->noloop_assumptions
);
2452 simplify_using_initial_values (loop
, IOR
, &desc
->infinite
);
2453 simplify_using_initial_values (loop
, UNKNOWN
, &desc
->niter_expr
);
2455 if (desc
->noloop_assumptions
2456 && XEXP (desc
->noloop_assumptions
, 0) == const_true_rtx
)
2459 if (GET_CODE (desc
->niter_expr
) == CONST_INT
)
2461 unsigned HOST_WIDEST_INT val
= INTVAL (desc
->niter_expr
);
2463 desc
->const_iter
= true;
2464 desc
->niter_max
= desc
->niter
= val
& GET_MODE_MASK (desc
->mode
);
2468 if (!desc
->niter_max
)
2469 desc
->niter_max
= determine_max_iter (desc
);
2471 /* simplify_using_initial_values does a copy propagation on the registers
2472 in the expression for the number of iterations. This prolongs life
2473 ranges of registers and increases register pressure, and usually
2474 brings no gain (and if it happens to do, the cse pass will take care
2475 of it anyway). So prevent this behavior, unless it enabled us to
2476 derive that the number of iterations is a constant. */
2477 desc
->niter_expr
= old_niter
;
2483 desc
->simple_p
= false;
2487 desc
->const_iter
= true;
2489 desc
->niter_max
= 0;
2490 desc
->niter_expr
= const0_rtx
;
2494 /* Checks whether E is a simple exit from LOOP and stores its description
2498 check_simple_exit (struct loop
*loop
, edge e
, struct niter_desc
*desc
)
2500 basic_block exit_bb
;
2505 desc
->simple_p
= false;
2507 /* It must belong directly to the loop. */
2508 if (exit_bb
->loop_father
!= loop
)
2511 /* It must be tested (at least) once during any iteration. */
2512 if (!dominated_by_p (CDI_DOMINATORS
, loop
->latch
, exit_bb
))
2515 /* It must end in a simple conditional jump. */
2516 if (!any_condjump_p (BB_END (exit_bb
)))
2519 ein
= EDGE_SUCC (exit_bb
, 0);
2521 ein
= EDGE_SUCC (exit_bb
, 1);
2524 desc
->in_edge
= ein
;
2526 /* Test whether the condition is suitable. */
2527 if (!(condition
= get_condition (BB_END (ein
->src
), &at
, false, false)))
2530 if (ein
->flags
& EDGE_FALLTHRU
)
2532 condition
= reversed_condition (condition
);
2537 /* Check that we are able to determine number of iterations and fill
2538 in information about it. */
2539 iv_number_of_iterations (loop
, at
, condition
, desc
);
2542 /* Finds a simple exit of LOOP and stores its description into DESC. */
2545 find_simple_exit (struct loop
*loop
, struct niter_desc
*desc
)
2550 struct niter_desc act
;
2554 desc
->simple_p
= false;
2555 body
= get_loop_body (loop
);
2557 for (i
= 0; i
< loop
->num_nodes
; i
++)
2559 FOR_EACH_EDGE (e
, ei
, body
[i
]->succs
)
2561 if (flow_bb_inside_loop_p (loop
, e
->dest
))
2564 check_simple_exit (loop
, e
, &act
);
2568 /* Prefer constant iterations; the less the better. */
2571 else if (!act
.const_iter
2572 || (desc
->const_iter
&& act
.niter
>= desc
->niter
))
2582 fprintf (dump_file
, "Loop %d is simple:\n", loop
->num
);
2583 fprintf (dump_file
, " simple exit %d -> %d\n",
2584 desc
->out_edge
->src
->index
,
2585 desc
->out_edge
->dest
->index
);
2586 if (desc
->assumptions
)
2588 fprintf (dump_file
, " assumptions: ");
2589 print_rtl (dump_file
, desc
->assumptions
);
2590 fprintf (dump_file
, "\n");
2592 if (desc
->noloop_assumptions
)
2594 fprintf (dump_file
, " does not roll if: ");
2595 print_rtl (dump_file
, desc
->noloop_assumptions
);
2596 fprintf (dump_file
, "\n");
2600 fprintf (dump_file
, " infinite if: ");
2601 print_rtl (dump_file
, desc
->infinite
);
2602 fprintf (dump_file
, "\n");
2605 fprintf (dump_file
, " number of iterations: ");
2606 print_rtl (dump_file
, desc
->niter_expr
);
2607 fprintf (dump_file
, "\n");
2609 fprintf (dump_file
, " upper bound: ");
2610 fprintf (dump_file
, HOST_WIDEST_INT_PRINT_DEC
, desc
->niter_max
);
2611 fprintf (dump_file
, "\n");
2614 fprintf (dump_file
, "Loop %d is not simple.\n", loop
->num
);
2620 /* Creates a simple loop description of LOOP if it was not computed
2624 get_simple_loop_desc (struct loop
*loop
)
2626 struct niter_desc
*desc
= simple_loop_desc (loop
);
2631 desc
= xmalloc (sizeof (struct niter_desc
));
2632 iv_analysis_loop_init (loop
);
2633 find_simple_exit (loop
, desc
);
2639 /* Releases simple loop description for LOOP. */
2642 free_simple_loop_desc (struct loop
*loop
)
2644 struct niter_desc
*desc
= simple_loop_desc (loop
);