/* Rtl-level induction variable analysis.
   Copyright (C) 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 2, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
/* This is just a very simplistic analysis of induction variables of the loop.
   The major use is for determining the number of iterations of a loop for
   loop unrolling, doloop optimization and branch prediction.  For this we
   are only interested in bivs and a fairly limited set of givs that are
   needed in the exit condition.  We also only compute the iv information on
   demand.

   The interesting registers are determined.  A register is interesting if

   -- it is set only in the blocks that dominate the latch of the current loop
   -- all its sets are simple -- i.e. in the form we understand

   We also number the insns sequentially in each basic block.  For a use of an
   interesting reg, it is now easy to find a reaching definition (there may be
   only one).

   An induction variable is then analyzed simply by walking the use-def
   chains.

   Usage:

     iv_analysis_loop_init (loop);
     insn = iv_get_reaching_def (where, reg);
     if (iv_analyze (insn, reg, &iv))
       {
	 ...
       }
     iv_analysis_done ();  */
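/* For illustration only -- a hypothetical caller following the pattern above.
   The helper name and the surrounding pass structure are assumptions; only
   the iv_* entry points and dump_iv_info come from this file.

     static void
     example_report_iv (struct loop *loop, rtx use_insn, rtx reg)
     {
       struct rtx_iv iv;
       rtx def_insn;

       iv_analysis_loop_init (loop);

       def_insn = iv_get_reaching_def (use_insn, reg);
       if (def_insn && def_insn != const0_rtx
	   && iv_analyze (def_insn, reg, &iv))
	 dump_iv_info (stderr, &iv);

       iv_analysis_done ();
     }
*/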
#include "coretypes.h"
#include "hard-reg-set.h"
#include "basic-block.h"
/* The insn information.  */

struct insn_info
{
  unsigned luid;

  /* The previous definition of the register defined by the single
     set in the insn.  */
  rtx prev_def;

  /* The description of the iv.  */
  struct rtx_iv iv;
};

static struct insn_info *insn_info;

/* The last definition of each register.  */

static rtx *last_def;

static struct rtx_iv *bivs;

/* Maximal insn number for which there is room in the insn_info array.  */

static unsigned max_insn_no;

/* Maximal register number for which there is room in the bivs and last_def
   arrays.  */

static unsigned max_reg_no;
/* Dumps information about IV to FILE.  */

extern void dump_iv_info (FILE *, struct rtx_iv *);
dump_iv_info (FILE *file, struct rtx_iv *iv)

      fprintf (file, "not simple");

  if (iv->step == const0_rtx
      && !iv->first_special)
    fprintf (file, "invariant ");

  print_rtl (file, iv->base);
  if (iv->step != const0_rtx)

      fprintf (file, " + ");
      print_rtl (file, iv->step);
      fprintf (file, " * iteration");

  fprintf (file, " (in %s)", GET_MODE_NAME (iv->mode));

  if (iv->mode != iv->extend_mode)
    fprintf (file, " %s to %s",
	     rtx_name[iv->extend],
	     GET_MODE_NAME (iv->extend_mode));

  if (iv->mult != const1_rtx)

      fprintf (file, " * ");
      print_rtl (file, iv->mult);

  if (iv->delta != const0_rtx)

      fprintf (file, " + ");
      print_rtl (file, iv->delta);

  if (iv->first_special)
    fprintf (file, " (first special)");
/* Assigns luids to insns in basic block BB.  */

assign_luids (basic_block bb)

  FOR_BB_INSNS (bb, insn)

      uid = INSN_UID (insn);
      insn_info[uid].luid = i++;
      insn_info[uid].prev_def = NULL_RTX;
      insn_info[uid].iv.analysed = false;
/* Generates a subreg to get the least significant part of EXPR (in mode
   INNER_MODE) to OUTER_MODE.  */

lowpart_subreg (enum machine_mode outer_mode, rtx expr,
		enum machine_mode inner_mode)

  return simplify_gen_subreg (outer_mode, expr, inner_mode,
			      subreg_lowpart_offset (outer_mode, inner_mode));
/* Checks whether REG is a well-behaved register.  */

simple_reg_p (rtx reg)

  if (GET_CODE (reg) == SUBREG)

      if (!subreg_lowpart_p (reg))
      reg = SUBREG_REG (reg);

  if (HARD_REGISTER_NUM_P (r))

  if (GET_MODE_CLASS (GET_MODE (reg)) != MODE_INT)

  if (last_def[r] == const0_rtx)
/* Checks whether assignment LHS = RHS is simple enough for us to process.  */
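/* For reference, a summary of the checks that follow (a reading of the
   surviving code below, not an exhaustive specification): LHS must be a
   simple register, and RHS may be a constant, a simple register, a sign or
   zero extension of one, reg plus/minus a register or constant, reg times a
   constant, or reg shifted left by a constant.  */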
simple_set_p (rtx lhs, rtx rhs)

      || !simple_reg_p (lhs))

  if (CONSTANT_P (rhs))

  switch (GET_CODE (rhs))

      return simple_reg_p (rhs);

      return simple_reg_p (XEXP (rhs, 0));

      if (!simple_reg_p (op0)
	  && !CONSTANT_P (op0))
      if (!simple_reg_p (op1)
	  && !CONSTANT_P (op1))

      if (GET_CODE (rhs) == MULT
	  && !CONSTANT_P (op1))

      if (GET_CODE (rhs) == ASHIFT
/* Mark single SET in INSN.  */

mark_single_set (rtx insn, rtx set)

  rtx def = SET_DEST (set), src;

  src = find_reg_equal_equiv_note (insn);

  if (!simple_set_p (SET_DEST (set), src))

  uid = INSN_UID (insn);

  bivs[regno].analysed = false;
  insn_info[uid].prev_def = last_def[regno];
  last_def[regno] = insn;
/* Invalidate register REG unless it is equal to EXCEPT.  */

kill_sets (rtx reg, rtx by ATTRIBUTE_UNUSED, void *except)

  if (GET_CODE (reg) == SUBREG)
    reg = SUBREG_REG (reg);

  last_def[REGNO (reg)] = const0_rtx;
/* Marks sets in basic block BB.  If DOM is true, BB dominates the loop
   latch.  */

mark_sets (basic_block bb, bool dom)

  FOR_BB_INSNS (bb, insn)

	  && (set = single_set (insn)))
	def = mark_single_set (insn, set);

      note_stores (PATTERN (insn), kill_sets, def);
/* Prepare the data for an induction variable analysis of a LOOP.  */

iv_analysis_loop_init (struct loop *loop)

  basic_block *body = get_loop_body_in_dom_order (loop);

  if ((unsigned) get_max_uid () >= max_insn_no)

      /* Add some reserve for insns and registers produced in optimizations.  */
      max_insn_no = get_max_uid () + 100;
      insn_info = xmalloc (max_insn_no * sizeof (struct insn_info));

  if ((unsigned) max_reg_num () >= max_reg_no)

      max_reg_no = max_reg_num () + 100;
      last_def = xmalloc (max_reg_no * sizeof (rtx));
      bivs = xmalloc (max_reg_no * sizeof (struct rtx_iv));

  memset (last_def, 0, max_reg_num () * sizeof (rtx));

  for (b = 0; b < loop->num_nodes; b++)

      assign_luids (body[b]);
      mark_sets (body[b], just_once_each_iteration_p (loop, body[b]));
/* Gets the definition of REG reaching the INSN.  If REG is not simple,
   const0_rtx is returned.  If INSN is before the first def in the loop,
   NULL_RTX is returned.  */

iv_get_reaching_def (rtx insn, rtx reg)

  unsigned regno, luid, auid;

  if (GET_CODE (reg) == SUBREG)

      if (!subreg_lowpart_p (reg))
      reg = SUBREG_REG (reg);

      || last_def[regno] == const0_rtx)
    return last_def[regno];

  bb = BLOCK_FOR_INSN (insn);
  luid = insn_info[INSN_UID (insn)].luid;

  ainsn = last_def[regno];

      abb = BLOCK_FOR_INSN (ainsn);

      if (dominated_by_p (CDI_DOMINATORS, bb, abb))

      auid = INSN_UID (ainsn);
      ainsn = insn_info[auid].prev_def;

      abb = BLOCK_FOR_INSN (ainsn);

      auid = INSN_UID (ainsn);
      if (luid > insn_info[auid].luid)

      ainsn = insn_info[auid].prev_def;
/* Sets IV to invariant CST in MODE.  Always returns true (just for
   consistency with other iv manipulation functions that may fail).  */

iv_constant (struct rtx_iv *iv, rtx cst, enum machine_mode mode)

  if (mode == VOIDmode)
    mode = GET_MODE (cst);

  iv->step = const0_rtx;
  iv->first_special = false;
  iv->extend = UNKNOWN;
  iv->extend_mode = iv->mode;
  iv->delta = const0_rtx;
  iv->mult = const1_rtx;
/* Evaluates application of subreg to MODE on IV.  */

iv_subreg (struct rtx_iv *iv, enum machine_mode mode)

  /* If iv is invariant, just calculate the new value.  */
  if (iv->step == const0_rtx
      && !iv->first_special)

      rtx val = get_iv_value (iv, const0_rtx);
      val = lowpart_subreg (mode, val, iv->extend_mode);

      iv->extend = UNKNOWN;
      iv->mode = iv->extend_mode = mode;
      iv->delta = const0_rtx;
      iv->mult = const1_rtx;

  if (iv->extend_mode == mode)

  if (GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (iv->mode))

  iv->extend = UNKNOWN;

  iv->base = simplify_gen_binary (PLUS, iv->extend_mode, iv->delta,
				  simplify_gen_binary (MULT, iv->extend_mode,
						       iv->base, iv->mult));
  iv->step = simplify_gen_binary (MULT, iv->extend_mode, iv->step, iv->mult);
  iv->mult = const1_rtx;
  iv->delta = const0_rtx;
  iv->first_special = false;
/* Evaluates application of EXTEND to MODE on IV.  */

iv_extend (struct rtx_iv *iv, enum rtx_code extend, enum machine_mode mode)

  /* If iv is invariant, just calculate the new value.  */
  if (iv->step == const0_rtx
      && !iv->first_special)

      rtx val = get_iv_value (iv, const0_rtx);
      val = simplify_gen_unary (extend, mode, val, iv->extend_mode);

      iv->extend = UNKNOWN;
      iv->mode = iv->extend_mode = mode;
      iv->delta = const0_rtx;
      iv->mult = const1_rtx;

  if (mode != iv->extend_mode)

  if (iv->extend != UNKNOWN
      && iv->extend != extend)
/* Evaluates negation of IV.  */

iv_neg (struct rtx_iv *iv)

  if (iv->extend == UNKNOWN)

      iv->base = simplify_gen_unary (NEG, iv->extend_mode,
				     iv->base, iv->extend_mode);
      iv->step = simplify_gen_unary (NEG, iv->extend_mode,
				     iv->step, iv->extend_mode);

      iv->delta = simplify_gen_unary (NEG, iv->extend_mode,
				      iv->delta, iv->extend_mode);
      iv->mult = simplify_gen_unary (NEG, iv->extend_mode,
				     iv->mult, iv->extend_mode);
/* Evaluates addition or subtraction (according to OP) of IV1 to IV0.  */

iv_add (struct rtx_iv *iv0, struct rtx_iv *iv1, enum rtx_code op)

  enum machine_mode mode;

  /* Extend the constant to the extend_mode of the other operand if necessary.  */
  if (iv0->extend == UNKNOWN
      && iv0->mode == iv0->extend_mode
      && iv0->step == const0_rtx
      && GET_MODE_SIZE (iv0->extend_mode) < GET_MODE_SIZE (iv1->extend_mode))

      iv0->extend_mode = iv1->extend_mode;
      iv0->base = simplify_gen_unary (ZERO_EXTEND, iv0->extend_mode,
				      iv0->base, iv0->mode);

  if (iv1->extend == UNKNOWN
      && iv1->mode == iv1->extend_mode
      && iv1->step == const0_rtx
      && GET_MODE_SIZE (iv1->extend_mode) < GET_MODE_SIZE (iv0->extend_mode))

      iv1->extend_mode = iv0->extend_mode;
      iv1->base = simplify_gen_unary (ZERO_EXTEND, iv1->extend_mode,
				      iv1->base, iv1->mode);

  mode = iv0->extend_mode;
  if (mode != iv1->extend_mode)

  if (iv0->extend == UNKNOWN && iv1->extend == UNKNOWN)

      if (iv0->mode != iv1->mode)

      iv0->base = simplify_gen_binary (op, mode, iv0->base, iv1->base);
      iv0->step = simplify_gen_binary (op, mode, iv0->step, iv1->step);

  /* Handle addition of constant.  */
  if (iv1->extend == UNKNOWN
      && iv1->step == const0_rtx)

      iv0->delta = simplify_gen_binary (op, mode, iv0->delta, iv1->base);

  if (iv0->extend == UNKNOWN
      && iv0->step == const0_rtx)

      iv0->delta = simplify_gen_binary (PLUS, mode, iv0->delta, arg);
/* Evaluates multiplication of IV by constant MBY.  */

iv_mult (struct rtx_iv *iv, rtx mby)

  enum machine_mode mode = iv->extend_mode;

  if (GET_MODE (mby) != VOIDmode
      && GET_MODE (mby) != mode)

  if (iv->extend == UNKNOWN)

      iv->base = simplify_gen_binary (MULT, mode, iv->base, mby);
      iv->step = simplify_gen_binary (MULT, mode, iv->step, mby);

      iv->delta = simplify_gen_binary (MULT, mode, iv->delta, mby);
      iv->mult = simplify_gen_binary (MULT, mode, iv->mult, mby);
/* Evaluates shift of IV by constant MBY.  */

iv_shift (struct rtx_iv *iv, rtx mby)

  enum machine_mode mode = iv->extend_mode;

  if (GET_MODE (mby) != VOIDmode
      && GET_MODE (mby) != mode)

  if (iv->extend == UNKNOWN)

      iv->base = simplify_gen_binary (ASHIFT, mode, iv->base, mby);
      iv->step = simplify_gen_binary (ASHIFT, mode, iv->step, mby);

      iv->delta = simplify_gen_binary (ASHIFT, mode, iv->delta, mby);
      iv->mult = simplify_gen_binary (ASHIFT, mode, iv->mult, mby);
/* The recursive part of get_biv_step.  Gets the value of the single value
   defined in INSN with respect to the initial value of REG inside the loop,
   in the shape described at get_biv_step.  */

get_biv_step_1 (rtx insn, rtx reg,
		rtx *inner_step, enum machine_mode *inner_mode,
		enum rtx_code *extend, enum machine_mode outer_mode,

  rtx set, lhs, rhs, op0 = NULL_RTX, op1 = NULL_RTX;
  rtx next, nextr, def_insn, tmp;

  set = single_set (insn);
  rhs = find_reg_equal_equiv_note (insn);

  lhs = SET_DEST (set);

  code = GET_CODE (rhs);

      if (code == PLUS && CONSTANT_P (op0))

	  tmp = op0; op0 = op1; op1 = tmp;

      if (!simple_reg_p (op0)
	  || !CONSTANT_P (op1))

      if (GET_MODE (rhs) != outer_mode)

	  /* ppc64 uses expressions like

	     (set x:SI (plus:SI (subreg:SI y:DI) 1)).

	     This is equivalent to

	     (set x':DI (plus:DI y:DI 1))
	     (set x:SI (subreg:SI (x':DI)).  */
	  if (GET_CODE (op0) != SUBREG)
	  if (GET_MODE (SUBREG_REG (op0)) != outer_mode)

      if (GET_MODE (rhs) != outer_mode)

      if (!simple_reg_p (op0))

  if (GET_CODE (next) == SUBREG)

      if (!subreg_lowpart_p (next))

      nextr = SUBREG_REG (next);
      if (GET_MODE (nextr) != outer_mode)

  def_insn = iv_get_reaching_def (insn, nextr);
  if (def_insn == const0_rtx)

      if (!rtx_equal_p (nextr, reg))

      *inner_step = const0_rtx;
      *inner_mode = outer_mode;
      *outer_step = const0_rtx;

  else if (!get_biv_step_1 (def_insn, reg,
			    inner_step, inner_mode, extend, outer_mode,

  if (GET_CODE (next) == SUBREG)

      enum machine_mode amode = GET_MODE (next);

      if (GET_MODE_SIZE (amode) > GET_MODE_SIZE (*inner_mode))

      *inner_step = simplify_gen_binary (PLUS, outer_mode,
					 *inner_step, *outer_step);
      *outer_step = const0_rtx;

      if (*inner_mode == outer_mode
	  /* See comment in previous switch.  */
	  || GET_MODE (rhs) != outer_mode)
	*inner_step = simplify_gen_binary (code, outer_mode,

	*outer_step = simplify_gen_binary (code, outer_mode,

  if (GET_MODE (op0) != *inner_mode
      || *extend != UNKNOWN
      || *outer_step != const0_rtx)
/* Gets the operation on register REG inside loop, in shape

   OUTER_STEP + EXTEND_{OUTER_MODE} (SUBREG_{INNER_MODE} (REG + INNER_STEP))

   If the operation cannot be described in this shape, return false.  */
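/* For illustration (an assumed example, not taken from any particular dump):
   a plain SImode counter incremented by one each iteration,

     (set i:SI (plus:SI i:SI (const_int 1)))

   fits the shape above with INNER_STEP = const1_rtx, INNER_MODE = OUTER_MODE
   = SImode, EXTEND = UNKNOWN and OUTER_STEP = const0_rtx -- i.e. the whole
   update happens in the outer mode, so the extend and the outer step are
   trivial.  */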
get_biv_step (rtx reg, rtx *inner_step, enum machine_mode *inner_mode,
	      enum rtx_code *extend, enum machine_mode *outer_mode,

  *outer_mode = GET_MODE (reg);

  if (!get_biv_step_1 (last_def[REGNO (reg)], reg,
		       inner_step, inner_mode, extend, *outer_mode,

  if (*inner_mode != *outer_mode
      && *extend == UNKNOWN)

  if (*inner_mode == *outer_mode
      && *extend != UNKNOWN)

  if (*inner_mode == *outer_mode
      && *outer_step != const0_rtx)
/* Determines whether DEF is a biv and if so, stores its description
   into IV.  */

iv_analyze_biv (rtx def, struct rtx_iv *iv)

  rtx inner_step, outer_step;
  enum machine_mode inner_mode, outer_mode;
  enum rtx_code extend;

      fprintf (dump_file, "Analysing ");
      print_rtl (dump_file, def);
      fprintf (dump_file, " for bivness.\n");

  if (!CONSTANT_P (def))

      return iv_constant (iv, def, VOIDmode);

  if (last_def[regno] == const0_rtx)

	fprintf (dump_file, " not simple.\n");

  if (last_def[regno] && bivs[regno].analysed)

	fprintf (dump_file, " already analysed.\n");

      return iv->base != NULL_RTX;

  if (!last_def[regno])

      iv_constant (iv, def, VOIDmode);

  if (!get_biv_step (def, &inner_step, &inner_mode, &extend,
		     &outer_mode, &outer_step))

  /* Loop transforms base to es (base + inner_step) + outer_step,
     where es means extend of subreg between inner_mode and outer_mode.
     The corresponding induction variable is

     es ((base - outer_step) + i * (inner_step + outer_step)) + outer_step  */

  iv->base = simplify_gen_binary (MINUS, outer_mode, def, outer_step);
  iv->step = simplify_gen_binary (PLUS, outer_mode, inner_step, outer_step);
  iv->mode = inner_mode;
  iv->extend_mode = outer_mode;
  iv->mult = const1_rtx;
  iv->delta = outer_step;
  iv->first_special = inner_mode != outer_mode;

      fprintf (dump_file, " ");
      dump_iv_info (dump_file, iv);
      fprintf (dump_file, "\n");

  return iv->base != NULL_RTX;
/* Analyzes operand OP of INSN and stores the result to *IV.  */

iv_analyze_op (rtx insn, rtx op, struct rtx_iv *iv)

  bool inv = CONSTANT_P (op);

      fprintf (dump_file, "Analysing operand ");
      print_rtl (dump_file, op);
      fprintf (dump_file, " of insn ");
      print_rtl_single (dump_file, insn);

  if (GET_CODE (op) == SUBREG)

      if (!subreg_lowpart_p (op))

      if (!iv_analyze_op (insn, SUBREG_REG (op), iv))

      return iv_subreg (iv, GET_MODE (op));

      if (!last_def[regno])
      else if (last_def[regno] == const0_rtx)

	    fprintf (dump_file, " not simple.\n");

      iv_constant (iv, op, VOIDmode);

	  fprintf (dump_file, " ");
	  dump_iv_info (dump_file, iv);
	  fprintf (dump_file, "\n");

  def_insn = iv_get_reaching_def (insn, op);
  if (def_insn == const0_rtx)

	fprintf (dump_file, " not simple.\n");

  return iv_analyze (def_insn, op, iv);
/* Analyzes iv DEF defined in INSN and stores the result to *IV.  */

iv_analyze (rtx insn, rtx def, struct rtx_iv *iv)

  rtx set, rhs, mby = NULL_RTX, tmp;
  rtx op0 = NULL_RTX, op1 = NULL_RTX;
  struct rtx_iv iv0, iv1;
  enum machine_mode amode;

  if (insn == const0_rtx)

  if (GET_CODE (def) == SUBREG)

      if (!subreg_lowpart_p (def))

      if (!iv_analyze (insn, SUBREG_REG (def), iv))

      return iv_subreg (iv, GET_MODE (def));

    return iv_analyze_biv (def, iv);

      fprintf (dump_file, "Analysing def of ");
      print_rtl (dump_file, def);
      fprintf (dump_file, " in insn ");
      print_rtl_single (dump_file, insn);

  uid = INSN_UID (insn);
  if (insn_info[uid].iv.analysed)

	fprintf (dump_file, " already analysed.\n");
      *iv = insn_info[uid].iv;
      return iv->base != NULL_RTX;

  iv->mode = VOIDmode;
  iv->base = NULL_RTX;
  iv->step = NULL_RTX;

  set = single_set (insn);
  rhs = find_reg_equal_equiv_note (insn);
    rhs = XEXP (rhs, 0);
    rhs = SET_SRC (set);
  code = GET_CODE (rhs);

  if (CONSTANT_P (rhs))

      amode = GET_MODE (def);

      if (!subreg_lowpart_p (rhs))

      op0 = XEXP (rhs, 0);

      op0 = XEXP (rhs, 0);
      op1 = XEXP (rhs, 1);

      op0 = XEXP (rhs, 0);
      mby = XEXP (rhs, 1);
      if (!CONSTANT_P (mby))

	  if (!CONSTANT_P (op0))

      if (CONSTANT_P (XEXP (rhs, 0)))

	  op0 = XEXP (rhs, 0);
	  mby = XEXP (rhs, 1);

      amode = GET_MODE (rhs);

      if (!iv_analyze_op (insn, op0, &iv0))

      if (iv0.mode == VOIDmode)

	  iv0.extend_mode = amode;

      if (!iv_analyze_op (insn, op1, &iv1))

      if (iv1.mode == VOIDmode)

	  iv1.extend_mode = amode;

      if (!iv_extend (&iv0, code, amode))

      if (!iv_add (&iv0, &iv1, code))

      if (!iv_mult (&iv0, mby))

      if (!iv_shift (&iv0, mby))

  iv->analysed = true;
  insn_info[uid].iv = *iv;

      print_rtl (dump_file, def);
      fprintf (dump_file, " in insn ");
      print_rtl_single (dump_file, insn);
      fprintf (dump_file, " is ");
      dump_iv_info (dump_file, iv);
      fprintf (dump_file, "\n");

  return iv->base != NULL_RTX;
/* Checks whether the definition of register REG in INSN is a basic induction
   variable.  IV analysis must have been initialized (via a call to
   iv_analysis_loop_init) for this function to produce a result.  */

biv_p (rtx insn, rtx reg)

  if (last_def[REGNO (reg)] != insn)

  return iv_analyze_biv (reg, &iv);
/* Calculates the value of IV at ITERATION-th iteration.  */

get_iv_value (struct rtx_iv *iv, rtx iteration)

  /* We would need to generate some if_then_else patterns, and so far
     it is not needed anywhere.  */
  if (iv->first_special)

  if (iv->step != const0_rtx && iteration != const0_rtx)
    val = simplify_gen_binary (PLUS, iv->extend_mode, iv->base,
			       simplify_gen_binary (MULT, iv->extend_mode,
						    iv->step, iteration));

  if (iv->extend_mode == iv->mode)

  val = lowpart_subreg (iv->mode, val, iv->extend_mode);

  if (iv->extend == UNKNOWN)

  val = simplify_gen_unary (iv->extend, iv->extend_mode, val, iv->mode);
  val = simplify_gen_binary (PLUS, iv->extend_mode, iv->delta,
			     simplify_gen_binary (MULT, iv->extend_mode,
/* Free the data for an induction variable analysis.  */

iv_analysis_done (void)
/* Computes the inverse of X modulo (1 << MOD).  */

static unsigned HOST_WIDEST_INT
inverse (unsigned HOST_WIDEST_INT x, int mod)

  unsigned HOST_WIDEST_INT mask =
	  ((unsigned HOST_WIDEST_INT) 1 << (mod - 1) << 1) - 1;
  unsigned HOST_WIDEST_INT rslt = 1;

  for (i = 0; i < mod - 1; i++)

      rslt = (rslt * x) & mask;
/* Tries to estimate the maximum number of iterations.  */

static unsigned HOST_WIDEST_INT
determine_max_iter (struct niter_desc *desc)

  rtx niter = desc->niter_expr;
  rtx mmin, mmax, left, right;
  unsigned HOST_WIDEST_INT nmax, inc;

  if (GET_CODE (niter) == AND
      && GET_CODE (XEXP (niter, 0)) == CONST_INT)

      nmax = INTVAL (XEXP (niter, 0));
      if (!(nmax & (nmax + 1)))

	  desc->niter_max = nmax;

  get_mode_bounds (desc->mode, desc->signed_p, desc->mode, &mmin, &mmax);
  nmax = INTVAL (mmax) - INTVAL (mmin);

  if (GET_CODE (niter) == UDIV)

      if (GET_CODE (XEXP (niter, 1)) != CONST_INT)

	  desc->niter_max = nmax;

      inc = INTVAL (XEXP (niter, 1));
      niter = XEXP (niter, 0);

  if (GET_CODE (niter) == PLUS)

      left = XEXP (niter, 0);
      right = XEXP (niter, 1);

      if (GET_CODE (right) == CONST_INT)
	right = GEN_INT (-INTVAL (right));

  else if (GET_CODE (niter) == MINUS)

      left = XEXP (niter, 0);
      right = XEXP (niter, 1);

  if (GET_CODE (left) == CONST_INT)

  if (GET_CODE (right) == CONST_INT)

  nmax = INTVAL (mmax) - INTVAL (mmin);

  desc->niter_max = nmax / inc;
/* Checks whether register *REG is in set ALT.  Callback for for_each_rtx.  */

altered_reg_used (rtx *reg, void *alt)

  return REGNO_REG_SET_P (alt, REGNO (*reg));
/* Marks registers altered by EXPR in set ALT.  */

mark_altered (rtx expr, rtx by ATTRIBUTE_UNUSED, void *alt)

  if (GET_CODE (expr) == SUBREG)
    expr = SUBREG_REG (expr);

  SET_REGNO_REG_SET (alt, REGNO (expr));
/* Checks whether RHS is simple enough to process.  */

simple_rhs_p (rtx rhs)

  if (CONSTANT_P (rhs)

  switch (GET_CODE (rhs))

      op0 = XEXP (rhs, 0);
      op1 = XEXP (rhs, 1);
      /* Allow reg + const sets only.  */
      if (REG_P (op0) && CONSTANT_P (op1))
      if (REG_P (op1) && CONSTANT_P (op0))
/* Simplifies *EXPR using the assignment in INSN.  ALTERED is the set of
   registers altered so far.  */

simplify_using_assignment (rtx insn, rtx *expr, regset altered)

  rtx set = single_set (insn);
  rtx lhs = NULL_RTX, rhs;

      lhs = SET_DEST (set);
	  || altered_reg_used (&lhs, altered))

  note_stores (PATTERN (insn), mark_altered, altered);

      /* Kill all call clobbered registers.  */
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
	  SET_REGNO_REG_SET (altered, i);

  rhs = find_reg_equal_equiv_note (insn);
    rhs = XEXP (rhs, 0);
    rhs = SET_SRC (set);

  if (!simple_rhs_p (rhs))

  if (for_each_rtx (&rhs, altered_reg_used, altered))

  *expr = simplify_replace_rtx (*expr, lhs, rhs);
/* Checks whether A implies B.  */

implies_p (rtx a, rtx b)

  rtx op0, op1, opb0, opb1, r;
  enum machine_mode mode;

  if (GET_CODE (a) == EQ)

      r = simplify_replace_rtx (b, op0, op1);
      if (r == const_true_rtx)

      r = simplify_replace_rtx (b, op1, op0);
      if (r == const_true_rtx)

  /* A < B implies A + 1 <= B.  */
  if ((GET_CODE (a) == GT || GET_CODE (a) == LT)
      && (GET_CODE (b) == GE || GET_CODE (b) == LE))

      if (GET_CODE (a) == GT)

      if (GET_CODE (b) == GE)

      mode = GET_MODE (op0);
      if (mode != GET_MODE (opb0))
      else if (mode == VOIDmode)

	  mode = GET_MODE (op1);
	  if (mode != GET_MODE (opb1))

      if (mode != VOIDmode
	  && rtx_equal_p (op1, opb1)
	  && simplify_gen_binary (MINUS, mode, opb0, op0) == const1_rtx)
/* Canonicalizes COND so that

   (1) operands are ordered according to swap_commutative_operands_p;
   (2) (LE x const) is replaced with (LT x <const+1>), and similarly
       for GE, GEU, and LEU.  */
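/* For illustration (assumed examples, not from any particular dump):

     (le (reg:SI 60) (const_int 5))    is canonicalized to
     (lt (reg:SI 60) (const_int 6))

     (geu (reg:SI 60) (const_int 7))   is canonicalized to
     (gtu (reg:SI 60) (const_int 6))

   while comparisons against the extreme values of the mode are left alone,
   as the adjusted constant would wrap around.  */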
canon_condition (rtx cond)

  enum machine_mode mode;

  code = GET_CODE (cond);
  op0 = XEXP (cond, 0);
  op1 = XEXP (cond, 1);

  if (swap_commutative_operands_p (op0, op1))

      code = swap_condition (code);

  mode = GET_MODE (op0);
  if (mode == VOIDmode)
    mode = GET_MODE (op1);
  if (mode == VOIDmode)

  if (GET_CODE (op1) == CONST_INT
      && GET_MODE_CLASS (mode) != MODE_CC
      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)

      HOST_WIDE_INT const_val = INTVAL (op1);
      unsigned HOST_WIDE_INT uconst_val = const_val;
      unsigned HOST_WIDE_INT max_val
	= (unsigned HOST_WIDE_INT) GET_MODE_MASK (mode);

	  if ((unsigned HOST_WIDE_INT) const_val != max_val >> 1)
	    code = LT, op1 = gen_int_mode (const_val + 1, GET_MODE (op0));

	  /* When cross-compiling, const_val might be sign-extended from
	     BITS_PER_WORD to HOST_BITS_PER_WIDE_INT.  */
	  if ((HOST_WIDE_INT) (const_val & max_val)
	      != (((HOST_WIDE_INT) 1
		   << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1))))
	    code = GT, op1 = gen_int_mode (const_val - 1, mode);

	  if (uconst_val < max_val)
	    code = LTU, op1 = gen_int_mode (uconst_val + 1, mode);

	  if (uconst_val != 0)
	    code = GTU, op1 = gen_int_mode (uconst_val - 1, mode);

  if (op0 != XEXP (cond, 0)
      || op1 != XEXP (cond, 1)
      || code != GET_CODE (cond)
      || GET_MODE (cond) != SImode)
    cond = gen_rtx_fmt_ee (code, SImode, op0, op1);
/* Tries to use the fact that COND holds to simplify EXPR.  ALTERED is the
   set of altered regs.  */

simplify_using_condition (rtx cond, rtx *expr, regset altered)

  rtx rev, reve, exp = *expr;

  if (!COMPARISON_P (exp))

  /* If some register gets altered later, we do not really know its value
     at the time of the comparison.  */
      && for_each_rtx (&cond, altered_reg_used, altered))

  rev = reversed_condition (cond);
  reve = reversed_condition (exp);

  cond = canon_condition (cond);
  exp = canon_condition (exp);
    rev = canon_condition (rev);
    reve = canon_condition (reve);

  if (rtx_equal_p (exp, cond))

      *expr = const_true_rtx;

  if (rev && rtx_equal_p (exp, rev))

  if (implies_p (cond, exp))

      *expr = const_true_rtx;

  if (reve && implies_p (cond, reve))

  /* A proof by contradiction.  If *EXPR implies (not cond), *EXPR must
     be false.  */
  if (rev && implies_p (exp, rev))

  /* Similarly, if (not *EXPR) implies (not cond), *EXPR must be true.  */
  if (rev && reve && implies_p (reve, rev))

      *expr = const_true_rtx;

  /* We would like to have some other tests here.  TODO.  */
/* Uses the relationship between A and *B to possibly eliminate *B.
   OP is the operation we consider.  */

eliminate_implied_condition (enum rtx_code op, rtx a, rtx *b)

      /* If A implies *B, we may replace *B by true.  */
      if (implies_p (a, *b))
	*b = const_true_rtx;

      /* If *B implies A, we may replace *B by false.  */
      if (implies_p (*b, a))
/* Eliminates the conditions in TAIL that are implied by HEAD.  OP is the
   operation we consider.  */

eliminate_implied_conditions (enum rtx_code op, rtx *head, rtx tail)

  for (elt = tail; elt; elt = XEXP (elt, 1))
    eliminate_implied_condition (op, *head, &XEXP (elt, 0));
  for (elt = tail; elt; elt = XEXP (elt, 1))
    eliminate_implied_condition (op, XEXP (elt, 0), head);
/* Simplifies *EXPR using the initial values at the start of the LOOP.  If
   *EXPR is a list, its elements are assumed to be combined using OP.  */

simplify_using_initial_values (struct loop *loop, enum rtx_code op, rtx *expr)

  rtx head, tail, insn;

  if (CONSTANT_P (*expr))

  if (GET_CODE (*expr) == EXPR_LIST)

      head = XEXP (*expr, 0);
      tail = XEXP (*expr, 1);

      eliminate_implied_conditions (op, &head, tail);

	    neutral = const_true_rtx;

	    neutral = const0_rtx;
	    aggr = const_true_rtx;

      simplify_using_initial_values (loop, UNKNOWN, &head);

	  XEXP (*expr, 0) = aggr;
	  XEXP (*expr, 1) = NULL_RTX;

      else if (head == neutral)

	  simplify_using_initial_values (loop, op, expr);

	simplify_using_initial_values (loop, op, &tail);

      if (tail && XEXP (tail, 0) == aggr)

      XEXP (*expr, 0) = head;
      XEXP (*expr, 1) = tail;

  e = loop_preheader_edge (loop);
  if (e->src == ENTRY_BLOCK_PTR)

  altered = ALLOC_REG_SET (&reg_obstack);

      insn = BB_END (e->src);
      if (any_condjump_p (insn))

	  rtx cond = get_condition (BB_END (e->src), NULL, false, true);

	  if (cond && (e->flags & EDGE_FALLTHRU))
	    cond = reversed_condition (cond);

	      simplify_using_condition (cond, expr, altered);
	      if (CONSTANT_P (*expr))

		  FREE_REG_SET (altered);

      FOR_BB_INSNS_REVERSE (e->src, insn)

	  simplify_using_assignment (insn, expr, altered);
	  if (CONSTANT_P (*expr))

	      FREE_REG_SET (altered);

      /* This is a bit subtle.  Store away e->src in tmp_bb, since we modify
	 `e', and the subsequent count of e->src's predecessors would
	 otherwise look at the wrong block.  */
      e = EDGE_PRED (tmp_bb, 0);
      if (EDGE_COUNT (tmp_bb->preds) > 1
	  || e->src == ENTRY_BLOCK_PTR)

  FREE_REG_SET (altered);
/* Transforms invariant IV into MODE.  Adds assumptions to DESC based on the
   fact that IV occurs as the left operand of comparison COND whose
   signedness is SIGNED_P.  */

shorten_into_mode (struct rtx_iv *iv, enum machine_mode mode,
		   enum rtx_code cond, bool signed_p, struct niter_desc *desc)

  rtx mmin, mmax, cond_over, cond_under;

  get_mode_bounds (mode, signed_p, iv->extend_mode, &mmin, &mmax);
  cond_under = simplify_gen_relational (LT, SImode, iv->extend_mode,
  cond_over = simplify_gen_relational (GT, SImode, iv->extend_mode,

      if (cond_under != const0_rtx)
	  alloc_EXPR_LIST (0, cond_under, desc->infinite);
      if (cond_over != const0_rtx)
	desc->noloop_assumptions =
	  alloc_EXPR_LIST (0, cond_over, desc->noloop_assumptions);

      if (cond_over != const0_rtx)
	  alloc_EXPR_LIST (0, cond_over, desc->infinite);
      if (cond_under != const0_rtx)
	desc->noloop_assumptions =
	  alloc_EXPR_LIST (0, cond_under, desc->noloop_assumptions);

      if (cond_over != const0_rtx)
	  alloc_EXPR_LIST (0, cond_over, desc->infinite);
      if (cond_under != const0_rtx)
	  alloc_EXPR_LIST (0, cond_under, desc->infinite);

  iv->extend = signed_p ? SIGN_EXTEND : ZERO_EXTEND;
/* Transforms IV0 and IV1 compared by COND so that they are both compared as
   subregs of the same mode if possible (sometimes it is necessary to add
   some assumptions to DESC).  */

canonicalize_iv_subregs (struct rtx_iv *iv0, struct rtx_iv *iv1,
			 enum rtx_code cond, struct niter_desc *desc)

  enum machine_mode comp_mode;

  /* If the ivs behave specially in the first iteration, or are
     added/multiplied after extending, we ignore them.  */
  if (iv0->first_special || iv0->mult != const1_rtx || iv0->delta != const0_rtx)
  if (iv1->first_special || iv1->mult != const1_rtx || iv1->delta != const0_rtx)

  /* If there is some extend, it must match the signedness of the comparison.  */

      if (iv0->extend == ZERO_EXTEND
	  || iv1->extend == ZERO_EXTEND)

      if (iv0->extend == SIGN_EXTEND
	  || iv1->extend == SIGN_EXTEND)

  if (iv0->extend != UNKNOWN
      && iv1->extend != UNKNOWN
      && iv0->extend != iv1->extend)

  if (iv0->extend != UNKNOWN)
    signed_p = iv0->extend == SIGN_EXTEND;
  if (iv1->extend != UNKNOWN)
    signed_p = iv1->extend == SIGN_EXTEND;

  /* Values of both variables should be computed in the same mode.  These
     might indeed be different, if we have a comparison like

     (compare (subreg:SI (iv0)) (subreg:SI (iv1)))

     and iv0 and iv1 are both ivs iterating in SI mode, but calculated
     in different modes.  This does not seem impossible to handle, but
     it hardly ever occurs in practice.

     The only exception is the case when one of the operands is invariant.
     For example pentium 3 generates comparisons like
     (lt (subreg:HI (reg:SI)) 100).  Here we assign HImode to 100, but we
     definitely do not want this to prevent the optimization.  */
  comp_mode = iv0->extend_mode;
  if (GET_MODE_BITSIZE (comp_mode) < GET_MODE_BITSIZE (iv1->extend_mode))
    comp_mode = iv1->extend_mode;

  if (iv0->extend_mode != comp_mode)

      if (iv0->mode != iv0->extend_mode
	  || iv0->step != const0_rtx)

      iv0->base = simplify_gen_unary (signed_p ? SIGN_EXTEND : ZERO_EXTEND,
				      comp_mode, iv0->base, iv0->mode);
      iv0->extend_mode = comp_mode;

  if (iv1->extend_mode != comp_mode)

      if (iv1->mode != iv1->extend_mode
	  || iv1->step != const0_rtx)

      iv1->base = simplify_gen_unary (signed_p ? SIGN_EXTEND : ZERO_EXTEND,
				      comp_mode, iv1->base, iv1->mode);
      iv1->extend_mode = comp_mode;

  /* Check that both ivs belong to a range of a single mode.  If one of the
     operands is an invariant, we may need to shorten it into the common
     mode.  */
  if (iv0->mode == iv0->extend_mode
      && iv0->step == const0_rtx
      && iv0->mode != iv1->mode)
    shorten_into_mode (iv0, iv1->mode, cond, signed_p, desc);

  if (iv1->mode == iv1->extend_mode
      && iv1->step == const0_rtx
      && iv0->mode != iv1->mode)
    shorten_into_mode (iv1, iv0->mode, swap_condition (cond), signed_p, desc);

  if (iv0->mode != iv1->mode)

  desc->mode = iv0->mode;
  desc->signed_p = signed_p;
/* Computes the number of iterations of the CONDITION in INSN in LOOP and
   stores the result into DESC.  Very similar to determine_number_of_iterations
   (basically its rtl version), complicated by things like subregs.  */

iv_number_of_iterations (struct loop *loop, rtx insn, rtx condition,
			 struct niter_desc *desc)

  rtx op0, op1, delta, step, bound, may_xform, def_insn, tmp, tmp0, tmp1;
  struct rtx_iv iv0, iv1, tmp_iv;
  rtx assumption, may_not_xform;
  enum machine_mode mode, comp_mode;
  rtx mmin, mmax, mode_mmin, mode_mmax;
  unsigned HOST_WIDEST_INT s, size, d, inv;
  HOST_WIDEST_INT up, down, inc, step_val;
  int was_sharp = false;

  /* The meaning of these assumptions is this:
     if !assumptions
       then the rest of information does not have to be valid
     if noloop_assumptions then the loop does not roll
     if infinite then this exit is never used  */

  desc->assumptions = NULL_RTX;
  desc->noloop_assumptions = NULL_RTX;
  desc->infinite = NULL_RTX;
  desc->simple_p = true;

  desc->const_iter = false;
  desc->niter_expr = NULL_RTX;
  desc->niter_max = 0;

  cond = GET_CODE (condition);
  if (!COMPARISON_P (condition))

  mode = GET_MODE (XEXP (condition, 0));
  if (mode == VOIDmode)
    mode = GET_MODE (XEXP (condition, 1));
  /* The constant comparisons should be folded.  */
  if (mode == VOIDmode)

  /* We only handle integers or pointers.  */
  if (GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)

  op0 = XEXP (condition, 0);
  def_insn = iv_get_reaching_def (insn, op0);
  if (!iv_analyze (def_insn, op0, &iv0))
  if (iv0.extend_mode == VOIDmode)
    iv0.mode = iv0.extend_mode = mode;

  op1 = XEXP (condition, 1);
  def_insn = iv_get_reaching_def (insn, op1);
  if (!iv_analyze (def_insn, op1, &iv1))
  if (iv1.extend_mode == VOIDmode)
    iv1.mode = iv1.extend_mode = mode;

  if (GET_MODE_BITSIZE (iv0.extend_mode) > HOST_BITS_PER_WIDE_INT
      || GET_MODE_BITSIZE (iv1.extend_mode) > HOST_BITS_PER_WIDE_INT)

  /* Check condition and normalize it.  */

      tmp_iv = iv0; iv0 = iv1; iv1 = tmp_iv;
      cond = swap_condition (cond);

  /* Handle extends.  This is relatively nontrivial, so we only try in some
     easy cases, when we can canonicalize the ivs (possibly by adding some
     assumptions) to the shape subreg (base + i * step).  This function also
     fills in desc->mode and desc->signed_p.  */
  if (!canonicalize_iv_subregs (&iv0, &iv1, cond, desc))

  comp_mode = iv0.extend_mode;

  size = GET_MODE_BITSIZE (mode);
  get_mode_bounds (mode, (cond == LE || cond == LT), comp_mode, &mmin, &mmax);
  mode_mmin = lowpart_subreg (mode, mmin, comp_mode);
  mode_mmax = lowpart_subreg (mode, mmax, comp_mode);

  if (GET_CODE (iv0.step) != CONST_INT || GET_CODE (iv1.step) != CONST_INT)

  /* We can take care of the case of two induction variables chasing each other
     if the test is NE.  I have never seen a loop using it, but still it is
     easy enough to handle.  */
  if (iv0.step != const0_rtx && iv1.step != const0_rtx)

      iv0.step = simplify_gen_binary (MINUS, comp_mode, iv0.step, iv1.step);
      iv1.step = const0_rtx;

  /* This is either an infinite loop or one that ends immediately, depending
     on the initial values.  Unswitching should remove this kind of condition.  */
  if (iv0.step == const0_rtx && iv1.step == const0_rtx)

  if (iv0.step == const0_rtx)
    step_val = -INTVAL (iv1.step);
  else
    step_val = INTVAL (iv0.step);

  /* Ignore loops of while (i-- < 10) type.  */

      step_is_pow2 = !(step_val & (step_val - 1));

      /* We do not care about whether the step is a power of two in this
	 case.  */
      step_is_pow2 = false;

  /* Some more condition normalization.  We must record some assumptions
     due to overflows.  */

      /* We want to take care only of non-sharp relationals; this is easy,
	 as in the cases where the overflow would make the transformation
	 unsafe the loop does not roll.  Seemingly it would make more sense
	 to want to take care of sharp relationals instead, as NE is more
	 similar to them, but the problem is that here the transformation
	 would be more difficult due to possibly infinite loops.  */
      if (iv0.step == const0_rtx)

	  tmp = lowpart_subreg (mode, iv0.base, comp_mode);
	  assumption = simplify_gen_relational (EQ, SImode, mode, tmp,
	  if (assumption == const_true_rtx)
	  iv0.base = simplify_gen_binary (PLUS, comp_mode,
					  iv0.base, const1_rtx);

	  tmp = lowpart_subreg (mode, iv1.base, comp_mode);
	  assumption = simplify_gen_relational (EQ, SImode, mode, tmp,
	  if (assumption == const_true_rtx)
	  iv1.base = simplify_gen_binary (PLUS, comp_mode,
					  iv1.base, constm1_rtx);

      if (assumption != const0_rtx)
	desc->noloop_assumptions =
	  alloc_EXPR_LIST (0, assumption, desc->noloop_assumptions);
      cond = (cond == LT) ? LE : LEU;

      /* It will be useful to be able to tell the difference once more in
	 the LE -> NE reduction.  */

  /* Take care of trivially infinite loops.  */

      if (iv0.step == const0_rtx)

	  tmp = lowpart_subreg (mode, iv0.base, comp_mode);
	  if (rtx_equal_p (tmp, mode_mmin))
	      alloc_EXPR_LIST (0, const_true_rtx, NULL_RTX);

	  tmp = lowpart_subreg (mode, iv1.base, comp_mode);
	  if (rtx_equal_p (tmp, mode_mmax))
	      alloc_EXPR_LIST (0, const_true_rtx, NULL_RTX);

  /* If we can, we want to take care of NE conditions instead of size
     comparisons, as they are much friendlier (most importantly this takes
     care of the special handling of loops with step 1).  We can do so if we
     first check that the upper bound is greater than or equal to the lower
     bound, that their difference is constant c modulo the step, and that
     there is no overflow.  */

      if (iv0.step == const0_rtx)
	step = simplify_gen_unary (NEG, comp_mode, iv1.step, comp_mode);

      delta = simplify_gen_binary (MINUS, comp_mode, iv1.base, iv0.base);
      delta = lowpart_subreg (mode, delta, comp_mode);
      delta = simplify_gen_binary (UMOD, mode, delta, step);
      may_xform = const0_rtx;
      may_not_xform = const_true_rtx;

      if (GET_CODE (delta) == CONST_INT)

	  if (was_sharp && INTVAL (delta) == INTVAL (step) - 1)

	      /* A special case.  We have transformed a condition of type
		 for (i = 0; i < 4; i += 4)
		 into
		 for (i = 0; i <= 3; i += 4)
		 obviously if the test for overflow during that transformation
		 passed, we cannot overflow here.  Most importantly any
		 loop with a sharp end condition and step 1 falls into this
		 category, so handling this case specially is definitely
		 worth the trouble.  */
	      may_xform = const_true_rtx;

	  else if (iv0.step == const0_rtx)

	      bound = simplify_gen_binary (PLUS, comp_mode, mmin, step);
	      bound = simplify_gen_binary (MINUS, comp_mode, bound, delta);
	      bound = lowpart_subreg (mode, bound, comp_mode);
	      tmp = lowpart_subreg (mode, iv0.base, comp_mode);
	      may_xform = simplify_gen_relational (cond, SImode, mode,
	      may_not_xform = simplify_gen_relational (reverse_condition (cond),

	      bound = simplify_gen_binary (MINUS, comp_mode, mmax, step);
	      bound = simplify_gen_binary (PLUS, comp_mode, bound, delta);
	      bound = lowpart_subreg (mode, bound, comp_mode);
	      tmp = lowpart_subreg (mode, iv1.base, comp_mode);
	      may_xform = simplify_gen_relational (cond, SImode, mode,
	      may_not_xform = simplify_gen_relational (reverse_condition (cond),

      if (may_xform != const0_rtx)

	  /* We always perform the transformation provided that it is not
	     completely senseless.  This is OK, as we would need this
	     assumption to determine the number of iterations anyway.  */
	  if (may_xform != const_true_rtx)

	      /* If the step is a power of two and the final value we have
		 computed overflows, the cycle is infinite.  Otherwise it
		 is nontrivial to compute the number of iterations.  */
		desc->infinite = alloc_EXPR_LIST (0, may_not_xform,
		desc->assumptions = alloc_EXPR_LIST (0, may_xform,

	  /* We are going to lose some information about the upper bound on
	     the number of iterations in this step, so record the information
	     here.  */
	  inc = INTVAL (iv0.step) - INTVAL (iv1.step);
	  if (GET_CODE (iv1.base) == CONST_INT)
	    up = INTVAL (iv1.base);
	  else
	    up = INTVAL (mode_mmax) - inc;
	  down = INTVAL (GET_CODE (iv0.base) == CONST_INT
	  desc->niter_max = (up - down) / inc + 1;

	  if (iv0.step == const0_rtx)

	      iv0.base = simplify_gen_binary (PLUS, comp_mode, iv0.base, delta);
	      iv0.base = simplify_gen_binary (MINUS, comp_mode, iv0.base, step);

	      iv1.base = simplify_gen_binary (MINUS, comp_mode, iv1.base, delta);
	      iv1.base = simplify_gen_binary (PLUS, comp_mode, iv1.base, step);

	  tmp0 = lowpart_subreg (mode, iv0.base, comp_mode);
	  tmp1 = lowpart_subreg (mode, iv1.base, comp_mode);
	  assumption = simplify_gen_relational (reverse_condition (cond),
						SImode, mode, tmp0, tmp1);
	  if (assumption == const_true_rtx)
	  else if (assumption != const0_rtx)
	    desc->noloop_assumptions =
	      alloc_EXPR_LIST (0, assumption, desc->noloop_assumptions);

  /* Count the number of iterations.  */

      /* Everything we do here is arithmetic modulo the size of the mode.
	 This lets us do more involved computations of the number of
	 iterations than in the other cases.  First transform the condition
	 into the shape s * i <> c, with s positive.  */
      iv1.base = simplify_gen_binary (MINUS, comp_mode, iv1.base, iv0.base);
      iv0.base = const0_rtx;
      iv0.step = simplify_gen_binary (MINUS, comp_mode, iv0.step, iv1.step);
      iv1.step = const0_rtx;
      if (INTVAL (iv0.step) < 0)

	  iv0.step = simplify_gen_unary (NEG, comp_mode, iv0.step, mode);
	  iv1.base = simplify_gen_unary (NEG, comp_mode, iv1.base, mode);

      iv0.step = lowpart_subreg (mode, iv0.step, comp_mode);

      /* Let gcd (s, size of mode) = d.  If d does not divide c, the loop
	 is infinite.  Otherwise, the number of iterations is
	 (inverse(s/d) * (c/d)) mod (size of mode/d).  */
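      /* A worked example of the formula above (illustration only, the
	 numbers are assumptions): in an 8-bit mode with condition
	 6 * i != 10 we have s = 6, c = 10, d = gcd (6, 256) = 2.  Since d
	 divides c the loop is not necessarily infinite; s/d = 3, c/d = 5,
	 and size of mode / d = 128.  The inverse of 3 modulo 128 is 43
	 (3 * 43 = 129 == 1 mod 128), so the number of iterations is
	 (43 * 5) mod 128 = 215 mod 128 = 87.  Check: 6 * 87 = 522
	 = 2 * 256 + 10, i.e. 6 * 87 == 10 (mod 256).  */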
      s = INTVAL (iv0.step); d = 1;

      bound = GEN_INT (((unsigned HOST_WIDEST_INT) 1 << (size - 1) << 1) - 1);

      tmp1 = lowpart_subreg (mode, iv1.base, comp_mode);
      tmp = simplify_gen_binary (UMOD, mode, tmp1, GEN_INT (d));
      assumption = simplify_gen_relational (NE, SImode, mode, tmp, const0_rtx);
      desc->infinite = alloc_EXPR_LIST (0, assumption, desc->infinite);

      tmp = simplify_gen_binary (UDIV, mode, tmp1, GEN_INT (d));
      inv = inverse (s, size);
      inv = trunc_int_for_mode (inv, mode);
      tmp = simplify_gen_binary (MULT, mode, tmp, GEN_INT (inv));
      desc->niter_expr = simplify_gen_binary (AND, mode, tmp, bound);

  if (iv1.step == const0_rtx)
    /* Condition in shape a + s * i <= b.
       We must know that b + s does not overflow and a <= b + s and then we
       can compute the number of iterations as (b + s - a) / s.  (It might
       seem that we in fact could be more clever about testing the b + s
       overflow condition using some information about b - a mod s,
       but it was already taken into account during the LE -> NE transform.)  */
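    /* For instance (an assumed example): with a = 3, b = 10, s = 2 the
       induction variable takes the values 3, 5, 7, 9 before the exit test
       fails, i.e. (b + s - a) / s = (10 + 2 - 3) / 2 = 4 iterations,
       provided 10 + 2 does not overflow the mode.  */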
      tmp0 = lowpart_subreg (mode, iv0.base, comp_mode);
      tmp1 = lowpart_subreg (mode, iv1.base, comp_mode);

      bound = simplify_gen_binary (MINUS, mode, mode_mmax,
				   lowpart_subreg (mode, step,

	  /* If s is a power of 2, we know that the loop is infinite if
	     a % s <= b % s and b + s overflows.  */
	  assumption = simplify_gen_relational (reverse_condition (cond),

	  t0 = simplify_gen_binary (UMOD, mode, copy_rtx (tmp0), step);
	  t1 = simplify_gen_binary (UMOD, mode, copy_rtx (tmp1), step);
	  tmp = simplify_gen_relational (cond, SImode, mode, t0, t1);
	  assumption = simplify_gen_binary (AND, SImode, assumption, tmp);
	    alloc_EXPR_LIST (0, assumption, desc->infinite);

	  assumption = simplify_gen_relational (cond, SImode, mode,
	    alloc_EXPR_LIST (0, assumption, desc->assumptions);

      tmp = simplify_gen_binary (PLUS, comp_mode, iv1.base, iv0.step);
      tmp = lowpart_subreg (mode, tmp, comp_mode);
      assumption = simplify_gen_relational (reverse_condition (cond),
					    SImode, mode, tmp0, tmp);

      delta = simplify_gen_binary (PLUS, mode, tmp1, step);
      delta = simplify_gen_binary (MINUS, mode, delta, tmp0);

      /* Condition in shape a <= b - s * i.
	 We must know that a - s does not overflow and a - s <= b and then
	 we can again compute the number of iterations as (b - (a - s)) / s.  */
      step = simplify_gen_unary (NEG, mode, iv1.step, mode);
      tmp0 = lowpart_subreg (mode, iv0.base, comp_mode);
      tmp1 = lowpart_subreg (mode, iv1.base, comp_mode);

      bound = simplify_gen_binary (MINUS, mode, mode_mmin,
				   lowpart_subreg (mode, step, comp_mode));

	  /* If s is a power of 2, we know that the loop is infinite if
	     a % s <= b % s and a - s overflows.  */
	  assumption = simplify_gen_relational (reverse_condition (cond),

	  t0 = simplify_gen_binary (UMOD, mode, copy_rtx (tmp0), step);
	  t1 = simplify_gen_binary (UMOD, mode, copy_rtx (tmp1), step);
	  tmp = simplify_gen_relational (cond, SImode, mode, t0, t1);
	  assumption = simplify_gen_binary (AND, SImode, assumption, tmp);
	    alloc_EXPR_LIST (0, assumption, desc->infinite);

	  assumption = simplify_gen_relational (cond, SImode, mode,
	    alloc_EXPR_LIST (0, assumption, desc->assumptions);

      tmp = simplify_gen_binary (PLUS, comp_mode, iv0.base, iv1.step);
      tmp = lowpart_subreg (mode, tmp, comp_mode);
      assumption = simplify_gen_relational (reverse_condition (cond),

      delta = simplify_gen_binary (MINUS, mode, tmp0, step);
      delta = simplify_gen_binary (MINUS, mode, tmp1, delta);

  if (assumption == const_true_rtx)
  else if (assumption != const0_rtx)
    desc->noloop_assumptions =
      alloc_EXPR_LIST (0, assumption, desc->noloop_assumptions);
  delta = simplify_gen_binary (UDIV, mode, delta, step);
  desc->niter_expr = delta;

  old_niter = desc->niter_expr;

  simplify_using_initial_values (loop, AND, &desc->assumptions);
  if (desc->assumptions
      && XEXP (desc->assumptions, 0) == const0_rtx)
  simplify_using_initial_values (loop, IOR, &desc->noloop_assumptions);
  simplify_using_initial_values (loop, IOR, &desc->infinite);
  simplify_using_initial_values (loop, UNKNOWN, &desc->niter_expr);

  /* Rerun the simplification.  Consider code (created by copying loop
     headers) in which the first pass determines that i = 0, and the second
     pass then uses this to eliminate a noloop assumption.  */

  simplify_using_initial_values (loop, AND, &desc->assumptions);
  if (desc->assumptions
      && XEXP (desc->assumptions, 0) == const0_rtx)
  simplify_using_initial_values (loop, IOR, &desc->noloop_assumptions);
  simplify_using_initial_values (loop, IOR, &desc->infinite);
  simplify_using_initial_values (loop, UNKNOWN, &desc->niter_expr);

  if (desc->noloop_assumptions
      && XEXP (desc->noloop_assumptions, 0) == const_true_rtx)

  if (GET_CODE (desc->niter_expr) == CONST_INT)

      unsigned HOST_WIDEST_INT val = INTVAL (desc->niter_expr);

      desc->const_iter = true;
      desc->niter_max = desc->niter = val & GET_MODE_MASK (desc->mode);

      if (!desc->niter_max)
	desc->niter_max = determine_max_iter (desc);

      /* simplify_using_initial_values does a copy propagation on the registers
	 in the expression for the number of iterations.  This prolongs the
	 life ranges of registers and increases register pressure, and usually
	 brings no gain (and if it happens to, the cse pass will take care
	 of it anyway).  So prevent this behavior, unless it enabled us to
	 derive that the number of iterations is a constant.  */
      desc->niter_expr = old_niter;

  desc->simple_p = false;

  desc->const_iter = true;
  desc->niter_max = 0;
  desc->niter_expr = const0_rtx;
/* Checks whether E is a simple exit from LOOP and stores its description
   into DESC.  */

check_simple_exit (struct loop *loop, edge e, struct niter_desc *desc)

  basic_block exit_bb;

  desc->simple_p = false;

  /* It must belong directly to the loop.  */
  if (exit_bb->loop_father != loop)

  /* It must be tested (at least) once during any iteration.  */
  if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit_bb))

  /* It must end in a simple conditional jump.  */
  if (!any_condjump_p (BB_END (exit_bb)))

  ein = EDGE_SUCC (exit_bb, 0);
    ein = EDGE_SUCC (exit_bb, 1);

  desc->in_edge = ein;

  /* Test whether the condition is suitable.  */
  if (!(condition = get_condition (BB_END (ein->src), &at, false, false)))

  if (ein->flags & EDGE_FALLTHRU)

      condition = reversed_condition (condition);

  /* Check that we are able to determine the number of iterations and fill
     in the information about it.  */
  iv_number_of_iterations (loop, at, condition, desc);
/* Finds a simple exit of LOOP and stores its description into DESC.  */

find_simple_exit (struct loop *loop, struct niter_desc *desc)

  struct niter_desc act;

  desc->simple_p = false;
  body = get_loop_body (loop);

  for (i = 0; i < loop->num_nodes; i++)

      FOR_EACH_EDGE (e, ei, body[i]->succs)

	  if (flow_bb_inside_loop_p (loop, e->dest))

	  check_simple_exit (loop, e, &act);

	  /* Prefer constant iterations; the fewer the better.  */
	  else if (!act.const_iter
		   || (desc->const_iter && act.niter >= desc->niter))

      fprintf (dump_file, "Loop %d is simple:\n", loop->num);
      fprintf (dump_file, " simple exit %d -> %d\n",
	       desc->out_edge->src->index,
	       desc->out_edge->dest->index);
      if (desc->assumptions)

	  fprintf (dump_file, " assumptions: ");
	  print_rtl (dump_file, desc->assumptions);
	  fprintf (dump_file, "\n");

      if (desc->noloop_assumptions)

	  fprintf (dump_file, " does not roll if: ");
	  print_rtl (dump_file, desc->noloop_assumptions);
	  fprintf (dump_file, "\n");

	  fprintf (dump_file, " infinite if: ");
	  print_rtl (dump_file, desc->infinite);
	  fprintf (dump_file, "\n");

      fprintf (dump_file, " number of iterations: ");
      print_rtl (dump_file, desc->niter_expr);
      fprintf (dump_file, "\n");

      fprintf (dump_file, " upper bound: ");
      fprintf (dump_file, HOST_WIDEST_INT_PRINT_DEC, desc->niter_max);
      fprintf (dump_file, "\n");

    fprintf (dump_file, "Loop %d is not simple.\n", loop->num);
/* Creates a simple loop description of LOOP if it was not computed
   already.  */

get_simple_loop_desc (struct loop *loop)

  struct niter_desc *desc = simple_loop_desc (loop);

      desc = xmalloc (sizeof (struct niter_desc));
      iv_analysis_loop_init (loop);
      find_simple_exit (loop, desc);
/* Releases simple loop description for LOOP.  */

free_simple_loop_desc (struct loop *loop)

  struct niter_desc *desc = simple_loop_desc (loop);