/* Rtl-level induction variable analysis.
   Copyright (C) 2004, 2005 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 2, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */
/* This is just a very simplistic analysis of induction variables of the loop.
   The major use is for determining the number of iterations of a loop for
   loop unrolling, doloop optimization and branch prediction.  For this we
   are only interested in bivs and a fairly limited set of givs that are
   needed in the exit condition.  We also only compute the iv information on
   demand.

   The interesting registers are determined.  A register is interesting if

   -- it is set only in the blocks that dominate the latch of the current loop
   -- all its sets are simple -- i.e. in the form we understand

   We also number the insns sequentially in each basic block.  For a use of the
   interesting reg, it is now easy to find a reaching definition (there may be
   only one, as we took care of this when determining the interesting
   registers).

   Induction variables are then analyzed simply by walking the use-def
   chains.

   Usage:

   iv_analysis_loop_init (loop);
   insn = iv_get_reaching_def (where, reg);
   if (iv_analyze (insn, reg, &iv))
     {
       ...
     }
   iv_analysis_done (); */
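
/* A minimal usage sketch expanding the outline above.  This is an added
   illustration, not code from this file; the client function name and the
   WHERE/REG arguments are hypothetical.  */
#if 0
static void
example_query_iv (struct loop *loop, rtx where, rtx reg)
{
  struct rtx_iv iv;
  rtx def;

  iv_analysis_loop_init (loop);
  def = iv_get_reaching_def (where, reg);
  if (iv_analyze (def, reg, &iv))
    {
      /* REG behaves as iv.base + iteration * iv.step (in iv.mode).  */
    }
  iv_analysis_done ();
}
#endif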
#include "coretypes.h"
#include "hard-reg-set.h"
#include "basic-block.h"
/* The insn information.  */

struct insn_info
{
  /* Id of the insn.  */
  unsigned luid;

  /* The previous definition of the register defined by the single
     set in the insn.  */
  rtx prev_def;

  /* The description of the iv.  */
  struct rtx_iv iv;
};

static struct insn_info *insn_info;

/* The last definition of register.  */

static rtx *last_def;

/* The bivs.  */

static struct rtx_iv *bivs;

/* Maximal insn number for that there is place in insn_info array.  */

static unsigned max_insn_no;

/* Maximal register number for that there is place in bivs and last_def
   arrays.  */

static unsigned max_reg_no;
/* Dumps information about IV to FILE.  */

extern void dump_iv_info (FILE *, struct rtx_iv *);
void
dump_iv_info (FILE *file, struct rtx_iv *iv)
{
  if (!iv->base)
    {
      fprintf (file, "not simple");
      return;
    }

  if (iv->step == const0_rtx
      && !iv->first_special)
    fprintf (file, "invariant ");

  print_rtl (file, iv->base);
  if (iv->step != const0_rtx)
    {
      fprintf (file, " + ");
      print_rtl (file, iv->step);
      fprintf (file, " * iteration");
    }
  fprintf (file, " (in %s)", GET_MODE_NAME (iv->mode));

  if (iv->mode != iv->extend_mode)
    fprintf (file, " %s to %s",
             rtx_name[iv->extend],
             GET_MODE_NAME (iv->extend_mode));

  if (iv->mult != const1_rtx)
    {
      fprintf (file, " * ");
      print_rtl (file, iv->mult);
    }

  if (iv->delta != const0_rtx)
    {
      fprintf (file, " + ");
      print_rtl (file, iv->delta);
    }

  if (iv->first_special)
    fprintf (file, " (first special)");
}
/* Assigns luids to insns in basic block BB.  */

static void
assign_luids (basic_block bb)
{
  unsigned i = 0, uid;
  rtx insn;

  FOR_BB_INSNS (bb, insn)
    {
      uid = INSN_UID (insn);
      insn_info[uid].luid = i++;
      insn_info[uid].prev_def = NULL_RTX;
      insn_info[uid].iv.analysed = false;
    }
}
/* Generates a subreg to get the least significant part of EXPR (in mode
   INNER_MODE) to OUTER_MODE.  */

static rtx
lowpart_subreg (enum machine_mode outer_mode, rtx expr,
                enum machine_mode inner_mode)
{
  return simplify_gen_subreg (outer_mode, expr, inner_mode,
                              subreg_lowpart_offset (outer_mode, inner_mode));
}
/* Checks whether REG is a well-behaved register.  */

static bool
simple_reg_p (rtx reg)
{
  unsigned r;

  if (GET_CODE (reg) == SUBREG)
    {
      if (!subreg_lowpart_p (reg))
        return false;
      reg = SUBREG_REG (reg);
    }

  if (!REG_P (reg))
    return false;

  r = REGNO (reg);
  if (HARD_REGISTER_NUM_P (r))
    return false;

  if (GET_MODE_CLASS (GET_MODE (reg)) != MODE_INT)
    return false;

  if (last_def[r] == const0_rtx)
    return false;

  return true;
}
/* Checks whether assignment LHS = RHS is simple enough for us to process.  */

static bool
simple_set_p (rtx lhs, rtx rhs)
{
  rtx op0, op1;

  if (!REG_P (lhs)
      || !simple_reg_p (lhs))
    return false;

  if (CONSTANT_P (rhs))
    return true;

  switch (GET_CODE (rhs))
    {
    case SUBREG:
    case REG:
      return simple_reg_p (rhs);

    case SIGN_EXTEND:
    case ZERO_EXTEND:
    case NEG:
      return simple_reg_p (XEXP (rhs, 0));

    case PLUS:
    case MINUS:
    case MULT:
    case ASHIFT:
      op0 = XEXP (rhs, 0);
      op1 = XEXP (rhs, 1);

      if (!simple_reg_p (op0)
          && !CONSTANT_P (op0))
        return false;

      if (!simple_reg_p (op1)
          && !CONSTANT_P (op1))
        return false;

      if (GET_CODE (rhs) == MULT
          && !CONSTANT_P (op0)
          && !CONSTANT_P (op1))
        return false;

      if (GET_CODE (rhs) == ASHIFT
          && !CONSTANT_P (op1))
        return false;

      return true;

    default:
      return false;
    }
}
/* Mark single SET in INSN.  */

static rtx
mark_single_set (rtx insn, rtx set)
{
  rtx def = SET_DEST (set), src;
  unsigned regno, uid;

  src = find_reg_equal_equiv_note (insn);
  if (src)
    src = XEXP (src, 0);
  else
    src = SET_SRC (set);

  if (!simple_set_p (SET_DEST (set), src))
    return NULL_RTX;

  regno = REGNO (def);
  uid = INSN_UID (insn);

  bivs[regno].analysed = false;
  insn_info[uid].prev_def = last_def[regno];
  last_def[regno] = insn;

  return def;
}
/* Invalidate register REG unless it is equal to EXCEPT.  */

static void
kill_sets (rtx reg, rtx by ATTRIBUTE_UNUSED, void *except)
{
  if (GET_CODE (reg) == SUBREG)
    reg = SUBREG_REG (reg);
  if (!REG_P (reg))
    return;
  if (reg == except)
    return;

  last_def[REGNO (reg)] = const0_rtx;
}
/* Marks sets in basic block BB.  If DOM is true, BB dominates the loop
   latch.  */

static void
mark_sets (basic_block bb, bool dom)
{
  rtx insn, set, def;

  FOR_BB_INSNS (bb, insn)
    {
      if (!INSN_P (insn))
        continue;

      if (dom
          && (set = single_set (insn)))
        def = mark_single_set (insn, set);
      else
        def = NULL_RTX;

      note_stores (PATTERN (insn), kill_sets, def);
    }
}
/* Prepare the data for an induction variable analysis of a LOOP.  */

void
iv_analysis_loop_init (struct loop *loop)
{
  basic_block *body = get_loop_body_in_dom_order (loop);
  unsigned b;

  if ((unsigned) get_max_uid () >= max_insn_no)
    {
      /* Add some reserve for insns and registers produced in optimizations.  */
      max_insn_no = get_max_uid () + 100;
      if (insn_info)
        free (insn_info);
      insn_info = xmalloc (max_insn_no * sizeof (struct insn_info));
    }

  if ((unsigned) max_reg_num () >= max_reg_no)
    {
      max_reg_no = max_reg_num () + 100;
      if (last_def)
        free (last_def);
      last_def = xmalloc (max_reg_no * sizeof (rtx));
      if (bivs)
        free (bivs);
      bivs = xmalloc (max_reg_no * sizeof (struct rtx_iv));
    }

  memset (last_def, 0, max_reg_num () * sizeof (rtx));

  for (b = 0; b < loop->num_nodes; b++)
    {
      assign_luids (body[b]);
      mark_sets (body[b], just_once_each_iteration_p (loop, body[b]));
    }

  free (body);
}
/* Gets definition of REG reaching the INSN.  If REG is not simple, const0_rtx
   is returned.  If INSN is before the first def in the loop, NULL_RTX is
   returned.  */

rtx
iv_get_reaching_def (rtx insn, rtx reg)
{
  unsigned regno, luid, auid;
  rtx ainsn;
  basic_block bb, abb;

  if (GET_CODE (reg) == SUBREG)
    {
      if (!subreg_lowpart_p (reg))
        return const0_rtx;
      reg = SUBREG_REG (reg);
    }
  if (!REG_P (reg))
    return NULL_RTX;

  regno = REGNO (reg);
  if (!last_def[regno]
      || last_def[regno] == const0_rtx)
    return last_def[regno];

  bb = BLOCK_FOR_INSN (insn);
  luid = insn_info[INSN_UID (insn)].luid;

  ainsn = last_def[regno];
  while (1)
    {
      abb = BLOCK_FOR_INSN (ainsn);

      if (dominated_by_p (CDI_DOMINATORS, bb, abb))
        break;

      auid = INSN_UID (ainsn);
      ainsn = insn_info[auid].prev_def;

      if (!ainsn)
        return NULL_RTX;
    }

  while (1)
    {
      abb = BLOCK_FOR_INSN (ainsn);
      if (abb != bb)
        return ainsn;

      auid = INSN_UID (ainsn);
      if (luid > insn_info[auid].luid)
        return ainsn;

      ainsn = insn_info[auid].prev_def;
      if (!ainsn)
        return NULL_RTX;
    }
}
/* Sets IV to invariant CST in MODE.  Always returns true (just for
   consistency with other iv manipulation functions that may fail).  */

static bool
iv_constant (struct rtx_iv *iv, rtx cst, enum machine_mode mode)
{
  if (mode == VOIDmode)
    mode = GET_MODE (cst);

  iv->mode = mode;
  iv->base = cst;
  iv->step = const0_rtx;
  iv->first_special = false;
  iv->extend = UNKNOWN;
  iv->extend_mode = iv->mode;
  iv->delta = const0_rtx;
  iv->mult = const1_rtx;

  return true;
}
/* Evaluates application of subreg to MODE on IV.  */

static bool
iv_subreg (struct rtx_iv *iv, enum machine_mode mode)
{
  /* If iv is invariant, just calculate the new value.  */
  if (iv->step == const0_rtx
      && !iv->first_special)
    {
      rtx val = get_iv_value (iv, const0_rtx);
      val = lowpart_subreg (mode, val, iv->extend_mode);

      iv->base = val;
      iv->extend = UNKNOWN;
      iv->mode = iv->extend_mode = mode;
      iv->delta = const0_rtx;
      iv->mult = const1_rtx;
      return true;
    }

  if (iv->extend_mode == mode)
    return true;

  if (GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (iv->mode))
    return false;

  iv->extend = UNKNOWN;
  iv->mode = mode;

  iv->base = simplify_gen_binary (PLUS, iv->extend_mode, iv->delta,
                                  simplify_gen_binary (MULT, iv->extend_mode,
                                                       iv->base, iv->mult));
  iv->step = simplify_gen_binary (MULT, iv->extend_mode, iv->step, iv->mult);
  iv->mult = const1_rtx;
  iv->delta = const0_rtx;
  iv->first_special = false;

  return true;
}
/* Evaluates application of EXTEND to MODE on IV.  */

static bool
iv_extend (struct rtx_iv *iv, enum rtx_code extend, enum machine_mode mode)
{
  /* If iv is invariant, just calculate the new value.  */
  if (iv->step == const0_rtx
      && !iv->first_special)
    {
      rtx val = get_iv_value (iv, const0_rtx);
      val = simplify_gen_unary (extend, mode, val, iv->extend_mode);

      iv->base = val;
      iv->extend = UNKNOWN;
      iv->mode = iv->extend_mode = mode;
      iv->delta = const0_rtx;
      iv->mult = const1_rtx;
      return true;
    }

  if (mode != iv->extend_mode)
    return false;

  if (iv->extend != UNKNOWN
      && iv->extend != extend)
    return false;

  iv->extend = extend;

  return true;
}
/* Evaluates negation of IV.  */

static bool
iv_neg (struct rtx_iv *iv)
{
  if (iv->extend == UNKNOWN)
    {
      iv->base = simplify_gen_unary (NEG, iv->extend_mode,
                                     iv->base, iv->extend_mode);
      iv->step = simplify_gen_unary (NEG, iv->extend_mode,
                                     iv->step, iv->extend_mode);
    }
  else
    {
      iv->delta = simplify_gen_unary (NEG, iv->extend_mode,
                                      iv->delta, iv->extend_mode);
      iv->mult = simplify_gen_unary (NEG, iv->extend_mode,
                                     iv->mult, iv->extend_mode);
    }

  return true;
}
/* Evaluates addition or subtraction (according to OP) of IV1 to IV0.  */

static bool
iv_add (struct rtx_iv *iv0, struct rtx_iv *iv1, enum rtx_code op)
{
  enum machine_mode mode;
  rtx arg;

  /* Extend the constant to extend_mode of the other operand if necessary.  */
  if (iv0->extend == UNKNOWN
      && iv0->mode == iv0->extend_mode
      && iv0->step == const0_rtx
      && GET_MODE_SIZE (iv0->extend_mode) < GET_MODE_SIZE (iv1->extend_mode))
    {
      iv0->extend_mode = iv1->extend_mode;
      iv0->base = simplify_gen_unary (ZERO_EXTEND, iv0->extend_mode,
                                      iv0->base, iv0->mode);
    }
  if (iv1->extend == UNKNOWN
      && iv1->mode == iv1->extend_mode
      && iv1->step == const0_rtx
      && GET_MODE_SIZE (iv1->extend_mode) < GET_MODE_SIZE (iv0->extend_mode))
    {
      iv1->extend_mode = iv0->extend_mode;
      iv1->base = simplify_gen_unary (ZERO_EXTEND, iv1->extend_mode,
                                      iv1->base, iv1->mode);
    }

  mode = iv0->extend_mode;
  if (mode != iv1->extend_mode)
    return false;

  if (iv0->extend == UNKNOWN && iv1->extend == UNKNOWN)
    {
      if (iv0->mode != iv1->mode)
        return false;

      iv0->base = simplify_gen_binary (op, mode, iv0->base, iv1->base);
      iv0->step = simplify_gen_binary (op, mode, iv0->step, iv1->step);

      return true;
    }

  /* Handle addition of constant.  */
  if (iv1->extend == UNKNOWN
      && iv1->mode == mode
      && iv1->step == const0_rtx)
    {
      iv0->delta = simplify_gen_binary (op, mode, iv0->delta, iv1->base);
      return true;
    }

  if (iv0->extend == UNKNOWN
      && iv0->mode == mode
      && iv0->step == const0_rtx)
    {
      arg = iv0->base;
      *iv0 = *iv1;
      if (op == MINUS
          && !iv_neg (iv0))
        return false;

      iv0->delta = simplify_gen_binary (PLUS, mode, iv0->delta, arg);
      return true;
    }

  return false;
}
/* Evaluates multiplication of IV by constant MBY.  */

static bool
iv_mult (struct rtx_iv *iv, rtx mby)
{
  enum machine_mode mode = iv->extend_mode;

  if (GET_MODE (mby) != VOIDmode
      && GET_MODE (mby) != mode)
    return false;

  if (iv->extend == UNKNOWN)
    {
      iv->base = simplify_gen_binary (MULT, mode, iv->base, mby);
      iv->step = simplify_gen_binary (MULT, mode, iv->step, mby);
    }
  else
    {
      iv->delta = simplify_gen_binary (MULT, mode, iv->delta, mby);
      iv->mult = simplify_gen_binary (MULT, mode, iv->mult, mby);
    }

  return true;
}
/* Evaluates shift of IV by constant MBY.  */

static bool
iv_shift (struct rtx_iv *iv, rtx mby)
{
  enum machine_mode mode = iv->extend_mode;

  if (GET_MODE (mby) != VOIDmode
      && GET_MODE (mby) != mode)
    return false;

  if (iv->extend == UNKNOWN)
    {
      iv->base = simplify_gen_binary (ASHIFT, mode, iv->base, mby);
      iv->step = simplify_gen_binary (ASHIFT, mode, iv->step, mby);
    }
  else
    {
      iv->delta = simplify_gen_binary (ASHIFT, mode, iv->delta, mby);
      iv->mult = simplify_gen_binary (ASHIFT, mode, iv->mult, mby);
    }

  return true;
}
/* The recursive part of get_biv_step.  Gets the value of the single value
   defined in INSN wrto initial value of REG inside loop, in shape described
   at get_biv_step.  */

static bool
get_biv_step_1 (rtx insn, rtx reg,
                rtx *inner_step, enum machine_mode *inner_mode,
                enum rtx_code *extend, enum machine_mode outer_mode,
                rtx *outer_step)
{
  rtx set, rhs, op0 = NULL_RTX, op1 = NULL_RTX;
  rtx next, nextr, def_insn, tmp;
  enum rtx_code code;

  set = single_set (insn);
  rhs = find_reg_equal_equiv_note (insn);
  if (rhs)
    rhs = XEXP (rhs, 0);
  else
    rhs = SET_SRC (set);

  code = GET_CODE (rhs);
  switch (code)
    {
    case SUBREG:
    case REG:
      next = rhs;
      break;

    case PLUS:
    case MINUS:
      op0 = XEXP (rhs, 0);
      op1 = XEXP (rhs, 1);

      if (code == PLUS && CONSTANT_P (op0))
        {
          tmp = op0; op0 = op1; op1 = tmp;
        }

      if (!simple_reg_p (op0)
          || !CONSTANT_P (op1))
        return false;

      if (GET_MODE (rhs) != outer_mode)
        {
          /* ppc64 uses expressions like

             (set x:SI (plus:SI (subreg:SI y:DI) 1)).

             this is equivalent to

             (set x':DI (plus:DI y:DI 1))
             (set x:SI (subreg:SI (x':DI)).  */
          if (GET_CODE (op0) != SUBREG)
            return false;
          if (GET_MODE (SUBREG_REG (op0)) != outer_mode)
            return false;
        }

      next = op0;
      break;

    case SIGN_EXTEND:
    case ZERO_EXTEND:
      if (GET_MODE (rhs) != outer_mode)
        return false;

      op0 = XEXP (rhs, 0);
      if (!simple_reg_p (op0))
        return false;

      next = op0;
      break;

    default:
      return false;
    }

  if (GET_CODE (next) == SUBREG)
    {
      if (!subreg_lowpart_p (next))
        return false;

      nextr = SUBREG_REG (next);
      if (GET_MODE (nextr) != outer_mode)
        return false;
    }
  else
    nextr = next;

  def_insn = iv_get_reaching_def (insn, nextr);
  if (def_insn == const0_rtx)
    return false;

  if (!def_insn)
    {
      if (!rtx_equal_p (nextr, reg))
        return false;

      *inner_step = const0_rtx;
      *extend = UNKNOWN;
      *inner_mode = outer_mode;
      *outer_step = const0_rtx;
    }
  else if (!get_biv_step_1 (def_insn, reg,
                            inner_step, inner_mode, extend, outer_mode,
                            outer_step))
    return false;

  if (GET_CODE (next) == SUBREG)
    {
      enum machine_mode amode = GET_MODE (next);

      if (GET_MODE_SIZE (amode) > GET_MODE_SIZE (*inner_mode))
        return false;

      *inner_mode = amode;
      *inner_step = simplify_gen_binary (PLUS, outer_mode,
                                         *inner_step, *outer_step);
      *outer_step = const0_rtx;
      *extend = UNKNOWN;
    }

  switch (code)
    {
    case REG:
    case SUBREG:
      break;

    case PLUS:
    case MINUS:
      if (*inner_mode == outer_mode
          /* See comment in previous switch.  */
          || GET_MODE (rhs) != outer_mode)
        *inner_step = simplify_gen_binary (code, outer_mode,
                                           *inner_step, op1);
      else
        *outer_step = simplify_gen_binary (code, outer_mode,
                                           *outer_step, op1);
      break;

    case SIGN_EXTEND:
    case ZERO_EXTEND:
      gcc_assert (GET_MODE (op0) == *inner_mode
                  && *extend == UNKNOWN
                  && *outer_step == const0_rtx);

      *extend = code;
      break;

    default:
      return false;
    }

  return true;
}
/* Gets the operation on register REG inside loop, in shape

   OUTER_STEP + EXTEND_{OUTER_MODE} (SUBREG_{INNER_MODE} (REG + INNER_STEP))

   If the operation cannot be described in this shape, return false.  */

static bool
get_biv_step (rtx reg, rtx *inner_step, enum machine_mode *inner_mode,
              enum rtx_code *extend, enum machine_mode *outer_mode,
              rtx *outer_step)
{
  *outer_mode = GET_MODE (reg);

  if (!get_biv_step_1 (last_def[REGNO (reg)], reg,
                       inner_step, inner_mode, extend, *outer_mode,
                       outer_step))
    return false;

  gcc_assert ((*inner_mode == *outer_mode) != (*extend != UNKNOWN));
  gcc_assert (*inner_mode != *outer_mode || *outer_step == const0_rtx);

  return true;
}
/* Determines whether DEF is a biv and if so, stores its description
   to *IV.  */

static bool
iv_analyze_biv (rtx def, struct rtx_iv *iv)
{
  unsigned regno;
  rtx inner_step, outer_step;
  enum machine_mode inner_mode, outer_mode;
  enum rtx_code extend;

  if (dump_file)
    {
      fprintf (dump_file, "Analyzing ");
      print_rtl (dump_file, def);
      fprintf (dump_file, " for bivness.\n");
    }

  if (!REG_P (def))
    {
      if (!CONSTANT_P (def))
        return false;

      return iv_constant (iv, def, VOIDmode);
    }

  regno = REGNO (def);
  if (last_def[regno] == const0_rtx)
    {
      if (dump_file)
        fprintf (dump_file, "  not simple.\n");
      return false;
    }

  if (last_def[regno] && bivs[regno].analysed)
    {
      if (dump_file)
        fprintf (dump_file, "  already analysed.\n");

      *iv = bivs[regno];
      return iv->base != NULL_RTX;
    }

  if (!last_def[regno])
    {
      iv_constant (iv, def, VOIDmode);
      goto end;
    }

  iv->analysed = true;
  if (!get_biv_step (def, &inner_step, &inner_mode, &extend,
                     &outer_mode, &outer_step))
    {
      iv->base = NULL_RTX;
      goto end;
    }

  /* Loop transforms base to es (base + inner_step) + outer_step,
     where es means extend of subreg between inner_mode and outer_mode.
     The corresponding induction variable is

     es ((base - outer_step) + i * (inner_step + outer_step)) + outer_step  */

  iv->base = simplify_gen_binary (MINUS, outer_mode, def, outer_step);
  iv->step = simplify_gen_binary (PLUS, outer_mode, inner_step, outer_step);
  iv->mode = inner_mode;
  iv->extend_mode = outer_mode;
  iv->extend = extend;
  iv->mult = const1_rtx;
  iv->delta = outer_step;
  iv->first_special = inner_mode != outer_mode;

 end:
  if (dump_file)
    {
      fprintf (dump_file, "  ");
      dump_iv_info (dump_file, iv);
      fprintf (dump_file, "\n");
    }

  bivs[regno] = *iv;

  return iv->base != NULL_RTX;
}
/* Analyzes operand OP of INSN and stores the result to *IV.  */

static bool
iv_analyze_op (rtx insn, rtx op, struct rtx_iv *iv)
{
  rtx def_insn;
  unsigned regno;
  bool inv = CONSTANT_P (op);

  if (dump_file)
    {
      fprintf (dump_file, "Analyzing operand ");
      print_rtl (dump_file, op);
      fprintf (dump_file, " of insn ");
      print_rtl_single (dump_file, insn);
    }

  if (GET_CODE (op) == SUBREG)
    {
      if (!subreg_lowpart_p (op))
        return false;

      if (!iv_analyze_op (insn, SUBREG_REG (op), iv))
        return false;

      return iv_subreg (iv, GET_MODE (op));
    }

  if (!inv)
    {
      regno = REGNO (op);
      if (!last_def[regno])
        inv = true;
      else if (last_def[regno] == const0_rtx)
        {
          if (dump_file)
            fprintf (dump_file, "  not simple.\n");
          return false;
        }
    }

  if (inv)
    {
      iv_constant (iv, op, VOIDmode);

      if (dump_file)
        {
          fprintf (dump_file, "  ");
          dump_iv_info (dump_file, iv);
          fprintf (dump_file, "\n");
        }
      return true;
    }

  def_insn = iv_get_reaching_def (insn, op);
  if (def_insn == const0_rtx)
    {
      if (dump_file)
        fprintf (dump_file, "  not simple.\n");
      return false;
    }

  return iv_analyze (def_insn, op, iv);
}
/* Analyzes iv DEF defined in INSN and stores the result to *IV.  */

bool
iv_analyze (rtx insn, rtx def, struct rtx_iv *iv)
{
  unsigned uid;
  rtx set, rhs, mby = NULL_RTX, tmp;
  rtx op0 = NULL_RTX, op1 = NULL_RTX;
  struct rtx_iv iv0, iv1;
  enum machine_mode amode;
  enum rtx_code code;

  if (insn == const0_rtx)
    return false;

  if (GET_CODE (def) == SUBREG)
    {
      if (!subreg_lowpart_p (def))
        return false;

      if (!iv_analyze (insn, SUBREG_REG (def), iv))
        return false;

      return iv_subreg (iv, GET_MODE (def));
    }

  if (!insn)
    return iv_analyze_biv (def, iv);

  if (dump_file)
    {
      fprintf (dump_file, "Analyzing def of ");
      print_rtl (dump_file, def);
      fprintf (dump_file, " in insn ");
      print_rtl_single (dump_file, insn);
    }

  uid = INSN_UID (insn);
  if (insn_info[uid].iv.analysed)
    {
      if (dump_file)
        fprintf (dump_file, "  already analysed.\n");
      *iv = insn_info[uid].iv;
      return iv->base != NULL_RTX;
    }

  iv->mode = VOIDmode;
  iv->base = NULL_RTX;
  iv->step = NULL_RTX;

  set = single_set (insn);
  rhs = find_reg_equal_equiv_note (insn);
  if (rhs)
    rhs = XEXP (rhs, 0);
  else
    rhs = SET_SRC (set);
  code = GET_CODE (rhs);

  if (CONSTANT_P (rhs))
    {
      op0 = rhs;
      amode = GET_MODE (def);
    }
  else
    {
      switch (code)
        {
        case SUBREG:
          if (!subreg_lowpart_p (rhs))
            goto end;
          op0 = rhs;
          break;

        case REG:
          op0 = rhs;
          break;

        case SIGN_EXTEND:
        case ZERO_EXTEND:
        case NEG:
          op0 = XEXP (rhs, 0);
          break;

        case PLUS:
        case MINUS:
          op0 = XEXP (rhs, 0);
          op1 = XEXP (rhs, 1);
          break;

        case MULT:
          op0 = XEXP (rhs, 0);
          mby = XEXP (rhs, 1);
          if (!CONSTANT_P (mby))
            {
              gcc_assert (CONSTANT_P (op0));
              tmp = op0;
              op0 = mby;
              mby = tmp;
            }
          break;

        case ASHIFT:
          gcc_assert (!CONSTANT_P (XEXP (rhs, 0)));
          op0 = XEXP (rhs, 0);
          mby = XEXP (rhs, 1);
          break;

        default:
          goto end;
        }

      amode = GET_MODE (rhs);
    }

  if (op0)
    {
      if (!iv_analyze_op (insn, op0, &iv0))
        goto end;

      if (iv0.mode == VOIDmode)
        {
          iv0.mode = amode;
          iv0.extend_mode = amode;
        }
    }

  if (op1)
    {
      if (!iv_analyze_op (insn, op1, &iv1))
        goto end;

      if (iv1.mode == VOIDmode)
        {
          iv1.mode = amode;
          iv1.extend_mode = amode;
        }
    }

  switch (code)
    {
    case SIGN_EXTEND:
    case ZERO_EXTEND:
      if (!iv_extend (&iv0, code, amode))
        goto end;
      break;

    case PLUS:
    case MINUS:
      if (!iv_add (&iv0, &iv1, code))
        goto end;
      break;

    case MULT:
      if (!iv_mult (&iv0, mby))
        goto end;
      break;

    case ASHIFT:
      if (!iv_shift (&iv0, mby))
        goto end;
      break;

    default:
      break;
    }

  *iv = iv0;

 end:
  iv->analysed = true;
  insn_info[uid].iv = *iv;

  if (dump_file)
    {
      print_rtl (dump_file, def);
      fprintf (dump_file, " in insn ");
      print_rtl_single (dump_file, insn);
      fprintf (dump_file, " is ");
      dump_iv_info (dump_file, iv);
      fprintf (dump_file, "\n");
    }

  return iv->base != NULL_RTX;
}
/* Checks whether definition of register REG in INSN is a basic induction
   variable.  IV analysis must have been initialized (via a call to
   iv_analysis_loop_init) for this function to produce a result.  */

bool
biv_p (rtx insn, rtx reg)
{
  struct rtx_iv iv;

  if (!REG_P (reg))
    return false;

  if (last_def[REGNO (reg)] != insn)
    return false;

  return iv_analyze_biv (reg, &iv);
}
/* Calculates value of IV at ITERATION-th iteration.  */

rtx
get_iv_value (struct rtx_iv *iv, rtx iteration)
{
  rtx val;

  /* We would need to generate some if_then_else patterns, and so far
     it is not needed anywhere.  */
  gcc_assert (!iv->first_special);

  if (iv->step != const0_rtx && iteration != const0_rtx)
    val = simplify_gen_binary (PLUS, iv->extend_mode, iv->base,
                               simplify_gen_binary (MULT, iv->extend_mode,
                                                    iv->step, iteration));
  else
    val = iv->base;

  if (iv->extend_mode == iv->mode)
    return val;

  val = lowpart_subreg (iv->mode, val, iv->extend_mode);

  if (iv->extend == UNKNOWN)
    return val;

  val = simplify_gen_unary (iv->extend, iv->extend_mode, val, iv->mode);
  val = simplify_gen_binary (PLUS, iv->extend_mode, iv->delta,
                             simplify_gen_binary (MULT, iv->extend_mode,
                                                  iv->mult, val));

  return val;
}
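
/* Added note: the value computed above is

     delta + mult * extend_{extend_mode} (lowpart_{mode} (base + iteration * step))

   so, for instance, a plain SImode biv with base 0, step 4, delta 0, mult 1
   and no extend reduces to iteration * 4 in SImode.  */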
/* Free the data for an induction variable analysis.  */

void
iv_analysis_done (void)
/* Computes inverse to X modulo (1 << MOD).  */

static unsigned HOST_WIDEST_INT
inverse (unsigned HOST_WIDEST_INT x, int mod)
{
  unsigned HOST_WIDEST_INT mask =
          ((unsigned HOST_WIDEST_INT) 1 << (mod - 1) << 1) - 1;
  unsigned HOST_WIDEST_INT rslt = 1;
  int i;

  for (i = 0; i < mod - 1; i++)
    {
      rslt = (rslt * x) & mask;
      x = (x * x) & mask;
    }

  return rslt;
}
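
/* Added example: inverse (3, 4) is 11, since 3 * 11 == 33 == 1 (mod 16).
   The loop computes x^(2^(mod-1) - 1), which is the inverse because every
   odd x satisfies x^(2^(mod-1)) == 1 (mod 2^mod).  */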
/* Tries to estimate the maximum number of iterations.  */

static unsigned HOST_WIDEST_INT
determine_max_iter (struct niter_desc *desc)
{
  rtx niter = desc->niter_expr;
  rtx mmin, mmax, left, right;
  unsigned HOST_WIDEST_INT nmax, inc;

  if (GET_CODE (niter) == AND
      && GET_CODE (XEXP (niter, 0)) == CONST_INT)
    {
      nmax = INTVAL (XEXP (niter, 0));
      if (!(nmax & (nmax + 1)))
        {
          desc->niter_max = nmax;
          return nmax;
        }
    }

  get_mode_bounds (desc->mode, desc->signed_p, desc->mode, &mmin, &mmax);
  nmax = INTVAL (mmax) - INTVAL (mmin);

  if (GET_CODE (niter) == UDIV)
    {
      if (GET_CODE (XEXP (niter, 1)) != CONST_INT)
        {
          desc->niter_max = nmax;
          return nmax;
        }
      inc = INTVAL (XEXP (niter, 1));
      niter = XEXP (niter, 0);
    }
  else
    inc = 1;

  if (GET_CODE (niter) == PLUS)
    {
      left = XEXP (niter, 0);
      right = XEXP (niter, 1);

      if (GET_CODE (right) == CONST_INT)
        right = GEN_INT (-INTVAL (right));
    }
  else if (GET_CODE (niter) == MINUS)
    {
      left = XEXP (niter, 0);
      right = XEXP (niter, 1);
    }
  else
    {
      left = niter;
      right = NULL_RTX;
    }

  if (GET_CODE (left) == CONST_INT)
    mmax = left;
  if (right && GET_CODE (right) == CONST_INT)
    mmin = right;
  nmax = INTVAL (mmax) - INTVAL (mmin);

  desc->niter_max = nmax / inc;
  return nmax / inc;
}
/* Checks whether register *REG is in set ALT.  Callback for for_each_rtx.  */

static int
altered_reg_used (rtx *reg, void *alt)
{
  if (!REG_P (*reg))
    return 0;

  return REGNO_REG_SET_P (alt, REGNO (*reg));
}
/* Marks registers altered by EXPR in set ALT.  */

static void
mark_altered (rtx expr, rtx by ATTRIBUTE_UNUSED, void *alt)
{
  if (GET_CODE (expr) == SUBREG)
    expr = SUBREG_REG (expr);
  if (!REG_P (expr))
    return;

  SET_REGNO_REG_SET (alt, REGNO (expr));
}
/* Checks whether RHS is simple enough to process.  */

static bool
simple_rhs_p (rtx rhs)
{
  rtx op0, op1;

  if (CONSTANT_P (rhs)
      || REG_P (rhs))
    return true;

  switch (GET_CODE (rhs))
    {
    case PLUS:
    case MINUS:
      op0 = XEXP (rhs, 0);
      op1 = XEXP (rhs, 1);
      /* Allow reg + const sets only.  */
      if (REG_P (op0) && CONSTANT_P (op1))
        return true;
      if (REG_P (op1) && CONSTANT_P (op0))
        return true;

      return false;

    default:
      return false;
    }
}
/* Simplifies *EXPR using assignment in INSN.  ALTERED is the set of registers
   altered so far.  */

static void
simplify_using_assignment (rtx insn, rtx *expr, regset altered)
{
  rtx set = single_set (insn);
  rtx lhs = NULL_RTX, rhs;
  bool ret = false;

  if (set)
    {
      lhs = SET_DEST (set);
      if (!REG_P (lhs)
          || altered_reg_used (&lhs, altered))
        ret = true;
    }
  else
    ret = true;

  note_stores (PATTERN (insn), mark_altered, altered);
  if (CALL_P (insn))
    {
      int i;

      /* Kill all call clobbered registers.  */
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
        if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
          SET_REGNO_REG_SET (altered, i);
    }

  if (ret)
    return;

  rhs = find_reg_equal_equiv_note (insn);
  if (rhs)
    rhs = XEXP (rhs, 0);
  else
    rhs = SET_SRC (set);

  if (!simple_rhs_p (rhs))
    return;

  if (for_each_rtx (&rhs, altered_reg_used, altered))
    return;

  *expr = simplify_replace_rtx (*expr, lhs, rhs);
}
/* Checks whether A implies B.  */

static bool
implies_p (rtx a, rtx b)
{
  rtx op0, op1, opb0, opb1, r;
  enum machine_mode mode;

  if (GET_CODE (a) == EQ)
    {
      op0 = XEXP (a, 0);
      op1 = XEXP (a, 1);

      if (REG_P (op0))
        {
          r = simplify_replace_rtx (b, op0, op1);
          if (r == const_true_rtx)
            return true;
        }

      if (REG_P (op1))
        {
          r = simplify_replace_rtx (b, op1, op0);
          if (r == const_true_rtx)
            return true;
        }
    }

  /* A < B implies A + 1 <= B.  */
  if ((GET_CODE (a) == GT || GET_CODE (a) == LT)
      && (GET_CODE (b) == GE || GET_CODE (b) == LE))
    {
      op0 = XEXP (a, 0);
      op1 = XEXP (a, 1);
      opb0 = XEXP (b, 0);
      opb1 = XEXP (b, 1);

      if (GET_CODE (a) == GT)
        {
          r = op0;
          op0 = op1;
          op1 = r;
        }

      if (GET_CODE (b) == GE)
        {
          r = opb0;
          opb0 = opb1;
          opb1 = r;
        }

      mode = GET_MODE (op0);
      if (mode != GET_MODE (opb0))
        mode = VOIDmode;
      else if (mode == VOIDmode)
        {
          mode = GET_MODE (op1);
          if (mode != GET_MODE (opb1))
            mode = VOIDmode;
        }

      if (mode != VOIDmode
          && rtx_equal_p (op1, opb1)
          && simplify_gen_binary (MINUS, mode, opb0, op0) == const1_rtx)
        return true;
    }

  return false;
}
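
/* Added examples: implies_p ((eq x (const_int 5)), (gt x (const_int 4)))
   holds, because substituting 5 for x folds the second condition to
   const_true_rtx; and implies_p ((lt x y), (le (plus x (const_int 1)) y))
   holds by the A < B implies A + 1 <= B rule above.  */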
/* Canonicalizes COND so that

   (1) Ensure that operands are ordered according to
       swap_commutative_operands_p.

   (2) (LE x const) will be replaced with (LT x <const+1>) and similarly
       for GE, GEU, and LEU.  */

rtx
canon_condition (rtx cond)
{
  rtx tem;
  rtx op0, op1;
  enum rtx_code code;
  enum machine_mode mode;

  code = GET_CODE (cond);
  op0 = XEXP (cond, 0);
  op1 = XEXP (cond, 1);

  if (swap_commutative_operands_p (op0, op1))
    {
      code = swap_condition (code);
      tem = op0;
      op0 = op1;
      op1 = tem;
    }

  mode = GET_MODE (op0);
  if (mode == VOIDmode)
    mode = GET_MODE (op1);
  gcc_assert (mode != VOIDmode);

  if (GET_CODE (op1) == CONST_INT
      && GET_MODE_CLASS (mode) != MODE_CC
      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
    {
      HOST_WIDE_INT const_val = INTVAL (op1);
      unsigned HOST_WIDE_INT uconst_val = const_val;
      unsigned HOST_WIDE_INT max_val
        = (unsigned HOST_WIDE_INT) GET_MODE_MASK (mode);

      switch (code)
        {
        case LE:
          if ((unsigned HOST_WIDE_INT) const_val != max_val >> 1)
            code = LT, op1 = gen_int_mode (const_val + 1, GET_MODE (op0));
          break;

        /* When cross-compiling, const_val might be sign-extended from
           BITS_PER_WORD to HOST_BITS_PER_WIDE_INT */
        case GE:
          if ((HOST_WIDE_INT) (const_val & max_val)
              != (((HOST_WIDE_INT) 1
                   << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1))))
            code = GT, op1 = gen_int_mode (const_val - 1, mode);
          break;

        case LEU:
          if (uconst_val < max_val)
            code = LTU, op1 = gen_int_mode (uconst_val + 1, mode);
          break;

        case GEU:
          if (uconst_val != 0)
            code = GTU, op1 = gen_int_mode (uconst_val - 1, mode);
          break;

        default:
          break;
        }
    }

  if (op0 != XEXP (cond, 0)
      || op1 != XEXP (cond, 1)
      || code != GET_CODE (cond)
      || GET_MODE (cond) != SImode)
    cond = gen_rtx_fmt_ee (code, SImode, op0, op1);

  return cond;
}
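
/* Two added examples of the canonicalization above, for SImode operands:
   (le x (const_int 4)) becomes (lt x (const_int 5)), and
   (geu x (const_int 7)) becomes (gtu x (const_int 6)).  Conditions whose
   constant already sits at the mode boundary, e.g. (geu x (const_int 0)),
   are left unchanged.  */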
/* Tries to use the fact that COND holds to simplify EXPR.  ALTERED is the
   set of altered regs.  */

static void
simplify_using_condition (rtx cond, rtx *expr, regset altered)
{
  rtx rev, reve, exp = *expr;

  if (!COMPARISON_P (exp))
    return;

  /* If some register gets altered later, we do not really speak about its
     value at the time of comparison.  */
  if (altered
      && for_each_rtx (&cond, altered_reg_used, altered))
    return;

  rev = reversed_condition (cond);
  reve = reversed_condition (exp);

  cond = canon_condition (cond);
  exp = canon_condition (exp);
  if (rev)
    rev = canon_condition (rev);
  if (reve)
    reve = canon_condition (reve);

  if (rtx_equal_p (exp, cond))
    {
      *expr = const_true_rtx;
      return;
    }

  if (rev && rtx_equal_p (exp, rev))
    {
      *expr = const0_rtx;
      return;
    }

  if (implies_p (cond, exp))
    {
      *expr = const_true_rtx;
      return;
    }

  if (reve && implies_p (cond, reve))
    {
      *expr = const0_rtx;
      return;
    }

  /* A proof by contradiction.  If *EXPR implies (not cond), *EXPR must
     be false.  */
  if (rev && implies_p (exp, rev))
    {
      *expr = const0_rtx;
      return;
    }

  /* Similarly, if (not *EXPR) implies (not cond), *EXPR must be true.  */
  if (rev && reve && implies_p (reve, rev))
    {
      *expr = const_true_rtx;
      return;
    }

  /* We would like to have some other tests here.  TODO.  */
}
/* Use relationship between A and *B to eventually eliminate *B.
   OP is the operation we consider.  */

static void
eliminate_implied_condition (enum rtx_code op, rtx a, rtx *b)
{
  switch (op)
    {
    case AND:
      /* If A implies *B, we may replace *B by true.  */
      if (implies_p (a, *b))
        *b = const_true_rtx;
      break;

    case IOR:
      /* If *B implies A, we may replace *B by false.  */
      if (implies_p (*b, a))
        *b = const0_rtx;
      break;

    default:
      gcc_unreachable ();
    }
}
/* Eliminates the conditions in TAIL that are implied by HEAD.  OP is the
   operation we consider.  */

static void
eliminate_implied_conditions (enum rtx_code op, rtx *head, rtx tail)
{
  rtx elt;

  for (elt = tail; elt; elt = XEXP (elt, 1))
    eliminate_implied_condition (op, *head, &XEXP (elt, 0));
  for (elt = tail; elt; elt = XEXP (elt, 1))
    eliminate_implied_condition (op, XEXP (elt, 0), head);
}
/* Simplifies *EXPR using initial values at the start of the LOOP.  If *EXPR
   is a list, its elements are assumed to be combined using OP.  */

static void
simplify_using_initial_values (struct loop *loop, enum rtx_code op, rtx *expr)
{
  rtx head, tail, insn;
  rtx neutral, aggr;
  regset altered;
  edge e;

  if (!*expr)
    return;

  if (CONSTANT_P (*expr))
    return;

  if (GET_CODE (*expr) == EXPR_LIST)
    {
      head = XEXP (*expr, 0);
      tail = XEXP (*expr, 1);

      eliminate_implied_conditions (op, &head, tail);

      switch (op)
        {
        case AND:
          neutral = const_true_rtx;
          aggr = const0_rtx;
          break;

        case IOR:
          neutral = const0_rtx;
          aggr = const_true_rtx;
          break;

        default:
          gcc_unreachable ();
        }

      simplify_using_initial_values (loop, UNKNOWN, &head);
      if (head == aggr)
        {
          XEXP (*expr, 0) = aggr;
          XEXP (*expr, 1) = NULL_RTX;
          return;
        }
      else if (head == neutral)
        {
          *expr = tail;
          simplify_using_initial_values (loop, op, expr);
          return;
        }
      simplify_using_initial_values (loop, op, &tail);

      if (tail && XEXP (tail, 0) == aggr)
        {
          *expr = tail;
          return;
        }

      XEXP (*expr, 0) = head;
      XEXP (*expr, 1) = tail;
      return;
    }

  gcc_assert (op == UNKNOWN);

  e = loop_preheader_edge (loop);
  if (e->src == ENTRY_BLOCK_PTR)
    return;

  altered = ALLOC_REG_SET (&reg_obstack);

  while (1)
    {
      insn = BB_END (e->src);
      if (any_condjump_p (insn))
        {
          rtx cond = get_condition (BB_END (e->src), NULL, false, true);

          if (cond && (e->flags & EDGE_FALLTHRU))
            cond = reversed_condition (cond);
          if (cond)
            {
              simplify_using_condition (cond, expr, altered);
              if (CONSTANT_P (*expr))
                {
                  FREE_REG_SET (altered);
                  return;
                }
            }
        }

      FOR_BB_INSNS_REVERSE (e->src, insn)
        {
          if (!INSN_P (insn))
            continue;

          simplify_using_assignment (insn, expr, altered);
          if (CONSTANT_P (*expr))
            {
              FREE_REG_SET (altered);
              return;
            }
        }

      if (!single_pred_p (e->src)
          || single_pred (e->src) == ENTRY_BLOCK_PTR)
        break;

      e = single_pred_edge (e->src);
    }

  FREE_REG_SET (altered);
}
/* Transforms invariant IV into MODE.  Adds assumptions based on the fact
   that IV occurs as left operand of comparison COND and its signedness
   is SIGNED_P to DESC.  */

static void
shorten_into_mode (struct rtx_iv *iv, enum machine_mode mode,
                   enum rtx_code cond, bool signed_p, struct niter_desc *desc)
{
  rtx mmin, mmax, cond_over, cond_under;

  get_mode_bounds (mode, signed_p, iv->extend_mode, &mmin, &mmax);
  cond_under = simplify_gen_relational (LT, SImode, iv->extend_mode,
                                        iv->base, mmin);
  cond_over = simplify_gen_relational (GT, SImode, iv->extend_mode,
                                       iv->base, mmax);

  switch (cond)
    {
    case LE:
    case LT:
    case LEU:
    case LTU:
      if (cond_under != const0_rtx)
        desc->infinite =
          alloc_EXPR_LIST (0, cond_under, desc->infinite);
      if (cond_over != const0_rtx)
        desc->noloop_assumptions =
          alloc_EXPR_LIST (0, cond_over, desc->noloop_assumptions);
      break;

    case GE:
    case GT:
    case GEU:
    case GTU:
      if (cond_over != const0_rtx)
        desc->infinite =
          alloc_EXPR_LIST (0, cond_over, desc->infinite);
      if (cond_under != const0_rtx)
        desc->noloop_assumptions =
          alloc_EXPR_LIST (0, cond_under, desc->noloop_assumptions);
      break;

    case NE:
      if (cond_over != const0_rtx)
        desc->infinite =
          alloc_EXPR_LIST (0, cond_over, desc->infinite);
      if (cond_under != const0_rtx)
        desc->infinite =
          alloc_EXPR_LIST (0, cond_under, desc->infinite);
      break;

    default:
      gcc_unreachable ();
    }

  iv->mode = mode;
  iv->extend = signed_p ? SIGN_EXTEND : ZERO_EXTEND;
}
/* Transforms IV0 and IV1 compared by COND so that they are both compared as
   subregs of the same mode if possible (sometimes it is necessary to add
   some assumptions to DESC).  */

static bool
canonicalize_iv_subregs (struct rtx_iv *iv0, struct rtx_iv *iv1,
                         enum rtx_code cond, struct niter_desc *desc)
{
  enum machine_mode comp_mode;
  bool signed_p;

  /* If the ivs behave specially in the first iteration, or are
     added/multiplied after extending, we ignore them.  */
  if (iv0->first_special || iv0->mult != const1_rtx || iv0->delta != const0_rtx)
    return false;
  if (iv1->first_special || iv1->mult != const1_rtx || iv1->delta != const0_rtx)
    return false;

  /* If there is some extend, it must match signedness of the comparison.  */
  switch (cond)
    {
    case LE:
    case LT:
      if (iv0->extend == ZERO_EXTEND
          || iv1->extend == ZERO_EXTEND)
        return false;
      signed_p = true;
      break;

    case LEU:
    case LTU:
      if (iv0->extend == SIGN_EXTEND
          || iv1->extend == SIGN_EXTEND)
        return false;
      signed_p = false;
      break;

    case NE:
      if (iv0->extend != UNKNOWN
          && iv1->extend != UNKNOWN
          && iv0->extend != iv1->extend)
        return false;

      signed_p = false;
      if (iv0->extend != UNKNOWN)
        signed_p = iv0->extend == SIGN_EXTEND;
      if (iv1->extend != UNKNOWN)
        signed_p = iv1->extend == SIGN_EXTEND;
      break;

    default:
      gcc_unreachable ();
    }

  /* Values of both variables should be computed in the same mode.  These
     might indeed be different, if we have comparison like

     (compare (subreg:SI (iv0)) (subreg:SI (iv1)))

     and iv0 and iv1 are both ivs iterating in SI mode, but calculated
     in different modes.  This does not seem impossible to handle, but
     it hardly ever occurs in practice.

     The only exception is the case when one of operands is invariant.
     For example pentium 3 generates comparisons like
     (lt (subreg:HI (reg:SI)) 100).  Here we assign HImode to 100, but we
     definitely do not want this to prevent the optimization.  */
  comp_mode = iv0->extend_mode;
  if (GET_MODE_BITSIZE (comp_mode) < GET_MODE_BITSIZE (iv1->extend_mode))
    comp_mode = iv1->extend_mode;

  if (iv0->extend_mode != comp_mode)
    {
      if (iv0->mode != iv0->extend_mode
          || iv0->step != const0_rtx)
        return false;

      iv0->base = simplify_gen_unary (signed_p ? SIGN_EXTEND : ZERO_EXTEND,
                                      comp_mode, iv0->base, iv0->mode);
      iv0->extend_mode = comp_mode;
    }

  if (iv1->extend_mode != comp_mode)
    {
      if (iv1->mode != iv1->extend_mode
          || iv1->step != const0_rtx)
        return false;

      iv1->base = simplify_gen_unary (signed_p ? SIGN_EXTEND : ZERO_EXTEND,
                                      comp_mode, iv1->base, iv1->mode);
      iv1->extend_mode = comp_mode;
    }

  /* Check that both ivs belong to a range of a single mode.  If one of the
     operands is an invariant, we may need to shorten it into the common
     mode.  */
  if (iv0->mode == iv0->extend_mode
      && iv0->step == const0_rtx
      && iv0->mode != iv1->mode)
    shorten_into_mode (iv0, iv1->mode, cond, signed_p, desc);

  if (iv1->mode == iv1->extend_mode
      && iv1->step == const0_rtx
      && iv0->mode != iv1->mode)
    shorten_into_mode (iv1, iv0->mode, swap_condition (cond), signed_p, desc);

  if (iv0->mode != iv1->mode)
    return false;

  desc->mode = iv0->mode;
  desc->signed_p = signed_p;

  return true;
}
/* Computes number of iterations of the CONDITION in INSN in LOOP and stores
   the result into DESC.  Very similar to determine_number_of_iterations
   (basically its rtl version), complicated by things like subregs.  */

static void
iv_number_of_iterations (struct loop *loop, rtx insn, rtx condition,
                         struct niter_desc *desc)
{
  rtx op0, op1, delta, step, bound, may_xform, def_insn, tmp, tmp0, tmp1;
  struct rtx_iv iv0, iv1, tmp_iv;
  rtx assumption, may_not_xform;
  enum rtx_code cond;
  enum machine_mode mode, comp_mode;
  rtx mmin, mmax, mode_mmin, mode_mmax;
  unsigned HOST_WIDEST_INT s, size, d, inv;
  HOST_WIDEST_INT up, down, inc, step_val;
  int was_sharp = false;
  rtx old_niter;
  bool step_is_pow2;

  /* The meaning of these assumptions is this:
     if !assumptions
       then the rest of information does not have to be valid
     if noloop_assumptions then the loop does not roll
     if infinite then this exit is never used  */

  desc->assumptions = NULL_RTX;
  desc->noloop_assumptions = NULL_RTX;
  desc->infinite = NULL_RTX;
  desc->simple_p = true;

  desc->const_iter = false;
  desc->niter_expr = NULL_RTX;
  desc->niter_max = 0;

  cond = GET_CODE (condition);
  gcc_assert (COMPARISON_P (condition));

  mode = GET_MODE (XEXP (condition, 0));
  if (mode == VOIDmode)
    mode = GET_MODE (XEXP (condition, 1));
  /* The constant comparisons should be folded.  */
  gcc_assert (mode != VOIDmode);

  /* We only handle integers or pointers.  */
  if (GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    goto fail;

  op0 = XEXP (condition, 0);
  def_insn = iv_get_reaching_def (insn, op0);
  if (!iv_analyze (def_insn, op0, &iv0))
    goto fail;
  if (iv0.extend_mode == VOIDmode)
    iv0.mode = iv0.extend_mode = mode;

  op1 = XEXP (condition, 1);
  def_insn = iv_get_reaching_def (insn, op1);
  if (!iv_analyze (def_insn, op1, &iv1))
    goto fail;
  if (iv1.extend_mode == VOIDmode)
    iv1.mode = iv1.extend_mode = mode;

  if (GET_MODE_BITSIZE (iv0.extend_mode) > HOST_BITS_PER_WIDE_INT
      || GET_MODE_BITSIZE (iv1.extend_mode) > HOST_BITS_PER_WIDE_INT)
    goto fail;

  /* Check condition and normalize it.  */
  switch (cond)
    {
    case GE:
    case GT:
    case GEU:
    case GTU:
      tmp_iv = iv0; iv0 = iv1; iv1 = tmp_iv;
      cond = swap_condition (cond);
      break;

    case NE:
    case LE:
    case LEU:
    case LT:
    case LTU:
      break;

    default:
      goto fail;
    }

  /* Handle extends.  This is relatively nontrivial, so we only try in some
     easy cases, when we can canonicalize the ivs (possibly by adding some
     assumptions) to shape subreg (base + i * step).  This function also fills
     in desc->mode and desc->signed_p.  */
  if (!canonicalize_iv_subregs (&iv0, &iv1, cond, desc))
    goto fail;

  comp_mode = iv0.extend_mode;
  mode = desc->mode;
  size = GET_MODE_BITSIZE (mode);
  get_mode_bounds (mode, (cond == LE || cond == LT), comp_mode, &mmin, &mmax);
  mode_mmin = lowpart_subreg (mode, mmin, comp_mode);
  mode_mmax = lowpart_subreg (mode, mmax, comp_mode);

  if (GET_CODE (iv0.step) != CONST_INT || GET_CODE (iv1.step) != CONST_INT)
    goto fail;

  /* We can take care of the case of two induction variables chasing each other
     if the test is NE.  I have never seen a loop using it, but still it is
     cheap to check.  */
  if (iv0.step != const0_rtx && iv1.step != const0_rtx)
    {
      if (cond != NE)
        goto fail;

      iv0.step = simplify_gen_binary (MINUS, comp_mode, iv0.step, iv1.step);
      iv1.step = const0_rtx;
    }

  /* This is either infinite loop or the one that ends immediately, depending
     on initial values.  Unswitching should remove this kind of conditions.  */
  if (iv0.step == const0_rtx && iv1.step == const0_rtx)
    goto fail;

  if (cond != NE)
    {
      if (iv0.step == const0_rtx)
        step_val = -INTVAL (iv1.step);
      else
        step_val = INTVAL (iv0.step);

      /* Ignore loops of while (i-- < 10) type.  */
      if (step_val < 0)
        goto fail;

      step_is_pow2 = !(step_val & (step_val - 1));
    }
  else
    {
      /* We do not care about whether the step is power of two in this
         case.  */
      step_is_pow2 = false;
      step_val = 0;
    }

  /* Some more condition normalization.  We must record some assumptions
     due to overflows.  */
  switch (cond)
    {
    case LT:
    case LTU:
      /* We want to take care only of non-sharp relationals; this is easy,
         as in cases the overflow would make the transformation unsafe
         the loop does not roll.  Seemingly it would make more sense to want
         to take care of sharp relationals instead, as NE is more similar to
         them, but the problem is that here the transformation would be more
         difficult due to possibly infinite loops.  */
      if (iv0.step == const0_rtx)
        {
          tmp = lowpart_subreg (mode, iv0.base, comp_mode);
          assumption = simplify_gen_relational (EQ, SImode, mode, tmp,
                                                mode_mmax);
          if (assumption == const_true_rtx)
            goto zero_iter_simplify;
          iv0.base = simplify_gen_binary (PLUS, comp_mode,
                                          iv0.base, const1_rtx);
        }
      else
        {
          tmp = lowpart_subreg (mode, iv1.base, comp_mode);
          assumption = simplify_gen_relational (EQ, SImode, mode, tmp,
                                                mode_mmin);
          if (assumption == const_true_rtx)
            goto zero_iter_simplify;
          iv1.base = simplify_gen_binary (PLUS, comp_mode,
                                          iv1.base, constm1_rtx);
        }

      if (assumption != const0_rtx)
        desc->noloop_assumptions =
          alloc_EXPR_LIST (0, assumption, desc->noloop_assumptions);
      cond = (cond == LT) ? LE : LEU;

      /* It will be useful to be able to tell the difference once more in
         LE -> NE reduction.  */
      was_sharp = true;
      break;

    default: ;
    }

  /* Take care of trivially infinite loops.  */
  if (cond != NE)
    {
      if (iv0.step == const0_rtx)
        {
          tmp = lowpart_subreg (mode, iv0.base, comp_mode);
          if (rtx_equal_p (tmp, mode_mmin))
            {
              desc->infinite =
                alloc_EXPR_LIST (0, const_true_rtx, NULL_RTX);
              /* Fill in the remaining fields somehow.  */
              goto zero_iter_simplify;
            }
        }
      else
        {
          tmp = lowpart_subreg (mode, iv1.base, comp_mode);
          if (rtx_equal_p (tmp, mode_mmax))
            {
              desc->infinite =
                alloc_EXPR_LIST (0, const_true_rtx, NULL_RTX);
              /* Fill in the remaining fields somehow.  */
              goto zero_iter_simplify;
            }
        }
    }

  /* If we can we want to take care of NE conditions instead of size
     comparisons, as they are much more friendly (most importantly
     this takes care of special handling of loops with step 1).  We can
     do it if we first check that upper bound is greater or equal to
     lower bound, their difference is constant c modulo step and that
     there is not an overflow.  */
  if (cond != NE)
    {
      if (iv0.step == const0_rtx)
        step = simplify_gen_unary (NEG, comp_mode, iv1.step, comp_mode);
      else
        step = iv0.step;
      delta = simplify_gen_binary (MINUS, comp_mode, iv1.base, iv0.base);
      delta = lowpart_subreg (mode, delta, comp_mode);
      delta = simplify_gen_binary (UMOD, mode, delta, step);
      may_xform = const0_rtx;
      may_not_xform = const_true_rtx;

      if (GET_CODE (delta) == CONST_INT)
        {
          if (was_sharp && INTVAL (delta) == INTVAL (step) - 1)
            {
              /* A special case.  We have transformed condition of type
                 for (i = 0; i < 4; i += 4)
                 into
                 for (i = 0; i <= 3; i += 4)
                 obviously if the test for overflow during that transformation
                 passed, we cannot overflow here.  Most importantly any
                 loop with sharp end condition and step 1 falls into this
                 category, so handling this case specially is definitely
                 worth the troubles.  */
              may_xform = const_true_rtx;
            }
          else if (iv0.step == const0_rtx)
            {
              bound = simplify_gen_binary (PLUS, comp_mode, mmin, step);
              bound = simplify_gen_binary (MINUS, comp_mode, bound, delta);
              bound = lowpart_subreg (mode, bound, comp_mode);
              tmp = lowpart_subreg (mode, iv0.base, comp_mode);
              may_xform = simplify_gen_relational (cond, SImode, mode,
                                                   bound, tmp);
              may_not_xform = simplify_gen_relational (reverse_condition (cond),
                                                       SImode, mode,
                                                       bound, tmp);
            }
          else
            {
              bound = simplify_gen_binary (MINUS, comp_mode, mmax, step);
              bound = simplify_gen_binary (PLUS, comp_mode, bound, delta);
              bound = lowpart_subreg (mode, bound, comp_mode);
              tmp = lowpart_subreg (mode, iv1.base, comp_mode);
              may_xform = simplify_gen_relational (cond, SImode, mode,
                                                   tmp, bound);
              may_not_xform = simplify_gen_relational (reverse_condition (cond),
                                                       SImode, mode,
                                                       tmp, bound);
            }
        }

      if (may_xform != const0_rtx)
        {
          /* We perform the transformation always provided that it is not
             completely senseless.  This is OK, as we would need this assumption
             to determine the number of iterations anyway.  */
          if (may_xform != const_true_rtx)
            {
              /* If the step is a power of two and the final value we have
                 computed overflows, the cycle is infinite.  Otherwise it
                 is nontrivial to compute the number of iterations.  */
              if (step_is_pow2)
                desc->infinite = alloc_EXPR_LIST (0, may_not_xform,
                                                  desc->infinite);
              else
                desc->assumptions = alloc_EXPR_LIST (0, may_xform,
                                                     desc->assumptions);
            }

          /* We are going to lose some information about upper bound on
             number of iterations in this step, so record the information
             here.  */
          inc = INTVAL (iv0.step) - INTVAL (iv1.step);
          if (GET_CODE (iv1.base) == CONST_INT)
            up = INTVAL (iv1.base);
          else
            up = INTVAL (mode_mmax) - inc;
          down = INTVAL (GET_CODE (iv0.base) == CONST_INT
                         ? iv0.base
                         : mode_mmin);
          desc->niter_max = (up - down) / inc + 1;

          if (iv0.step == const0_rtx)
            {
              iv0.base = simplify_gen_binary (PLUS, comp_mode, iv0.base, delta);
              iv0.base = simplify_gen_binary (MINUS, comp_mode, iv0.base, step);
            }
          else
            {
              iv1.base = simplify_gen_binary (MINUS, comp_mode, iv1.base, delta);
              iv1.base = simplify_gen_binary (PLUS, comp_mode, iv1.base, step);
            }

          tmp0 = lowpart_subreg (mode, iv0.base, comp_mode);
          tmp1 = lowpart_subreg (mode, iv1.base, comp_mode);
          assumption = simplify_gen_relational (reverse_condition (cond),
                                                SImode, mode, tmp0, tmp1);
          if (assumption == const_true_rtx)
            goto zero_iter_simplify;
          else if (assumption != const0_rtx)
            desc->noloop_assumptions =
              alloc_EXPR_LIST (0, assumption, desc->noloop_assumptions);
          cond = NE;
        }
    }

  /* Count the number of iterations.  */
  if (cond == NE)
    {
      /* Everything we do here is just arithmetics modulo size of mode.  This
         makes us able to do more involved computations of number of iterations
         than in other cases.  First transform the condition into shape
         s * i <> c, with s positive.  */
      iv1.base = simplify_gen_binary (MINUS, comp_mode, iv1.base, iv0.base);
      iv0.base = const0_rtx;
      iv0.step = simplify_gen_binary (MINUS, comp_mode, iv0.step, iv1.step);
      iv1.step = const0_rtx;
      if (INTVAL (iv0.step) < 0)
        {
          iv0.step = simplify_gen_unary (NEG, comp_mode, iv0.step, mode);
          iv1.base = simplify_gen_unary (NEG, comp_mode, iv1.base, mode);
        }
      iv0.step = lowpart_subreg (mode, iv0.step, comp_mode);

      /* Let nsd (s, size of mode) = d.  If d does not divide c, the loop
         is infinite.  Otherwise, the number of iterations is
         (inverse(s/d) * (c/d)) mod (size of mode/d).  */
      s = INTVAL (iv0.step); d = 1;
      while (s % 2 != 1)
        {
          s /= 2;
          d *= 2;
          size--;
        }
      bound = GEN_INT (((unsigned HOST_WIDEST_INT) 1 << (size - 1 ) << 1) - 1);

      tmp1 = lowpart_subreg (mode, iv1.base, comp_mode);
      tmp = simplify_gen_binary (UMOD, mode, tmp1, GEN_INT (d));
      assumption = simplify_gen_relational (NE, SImode, mode, tmp, const0_rtx);
      desc->infinite = alloc_EXPR_LIST (0, assumption, desc->infinite);

      tmp = simplify_gen_binary (UDIV, mode, tmp1, GEN_INT (d));
      inv = inverse (s, size);
      tmp = simplify_gen_binary (MULT, mode, tmp, gen_int_mode (inv, mode));
      desc->niter_expr = simplify_gen_binary (AND, mode, tmp, bound);
    }
  else
    {
      if (iv1.step == const0_rtx)
        /* Condition in shape a + s * i <= b
           We must know that b + s does not overflow and a <= b + s and then we
           can compute number of iterations as (b + s - a) / s.  (It might
           seem that we in fact could be more clever about testing the b + s
           overflow condition using some information about b - a mod s,
           but it was already taken into account during LE -> NE transform).  */
        {
          step = lowpart_subreg (mode, iv0.step, comp_mode);
          tmp0 = lowpart_subreg (mode, iv0.base, comp_mode);
          tmp1 = lowpart_subreg (mode, iv1.base, comp_mode);
          bound = simplify_gen_binary (MINUS, mode, mode_mmax,
                                       lowpart_subreg (mode, step,
                                                       comp_mode));
          if (step_is_pow2)
            {
              rtx t0, t1;

              /* If s is power of 2, we know that the loop is infinite if
                 a % s <= b % s and b + s overflows.  */
              assumption = simplify_gen_relational (reverse_condition (cond),
                                                    SImode, mode,
                                                    tmp1, bound);

              t0 = simplify_gen_binary (UMOD, mode, copy_rtx (tmp0), step);
              t1 = simplify_gen_binary (UMOD, mode, copy_rtx (tmp1), step);
              tmp = simplify_gen_relational (cond, SImode, mode, t0, t1);
              assumption = simplify_gen_binary (AND, SImode, assumption, tmp);
              desc->infinite =
                alloc_EXPR_LIST (0, assumption, desc->infinite);
            }
          else
            {
              assumption = simplify_gen_relational (cond, SImode, mode,
                                                    tmp1, bound);
              desc->assumptions =
                alloc_EXPR_LIST (0, assumption, desc->assumptions);
            }

          tmp = simplify_gen_binary (PLUS, comp_mode, iv1.base, iv0.step);
          tmp = lowpart_subreg (mode, tmp, comp_mode);
          assumption = simplify_gen_relational (reverse_condition (cond),
                                                SImode, mode, tmp0, tmp);

          delta = simplify_gen_binary (PLUS, mode, tmp1, step);
          delta = simplify_gen_binary (MINUS, mode, delta, tmp0);
        }
      else
        {
          /* Condition in shape a <= b - s * i
             We must know that a - s does not overflow and a - s <= b and then
             we can again compute number of iterations as (b - (a - s)) / s.  */
          step = simplify_gen_unary (NEG, mode, iv1.step, mode);
          tmp0 = lowpart_subreg (mode, iv0.base, comp_mode);
          tmp1 = lowpart_subreg (mode, iv1.base, comp_mode);
          bound = simplify_gen_binary (PLUS, mode, mode_mmin,
                                       lowpart_subreg (mode, step, comp_mode));

          if (step_is_pow2)
            {
              rtx t0, t1;

              /* If s is power of 2, we know that the loop is infinite if
                 a % s <= b % s and a - s overflows.  */
              assumption = simplify_gen_relational (reverse_condition (cond),
                                                    SImode, mode,
                                                    bound, tmp0);

              t0 = simplify_gen_binary (UMOD, mode, copy_rtx (tmp0), step);
              t1 = simplify_gen_binary (UMOD, mode, copy_rtx (tmp1), step);
              tmp = simplify_gen_relational (cond, SImode, mode, t0, t1);
              assumption = simplify_gen_binary (AND, SImode, assumption, tmp);
              desc->infinite =
                alloc_EXPR_LIST (0, assumption, desc->infinite);
            }
          else
            {
              assumption = simplify_gen_relational (cond, SImode, mode,
                                                    bound, tmp0);
              desc->assumptions =
                alloc_EXPR_LIST (0, assumption, desc->assumptions);
            }

          tmp = simplify_gen_binary (PLUS, comp_mode, iv0.base, iv1.step);
          tmp = lowpart_subreg (mode, tmp, comp_mode);
          assumption = simplify_gen_relational (reverse_condition (cond),
                                                SImode, mode, tmp, tmp1);

          delta = simplify_gen_binary (MINUS, mode, tmp0, step);
          delta = simplify_gen_binary (MINUS, mode, tmp1, delta);
        }

      if (assumption == const_true_rtx)
        goto zero_iter_simplify;
      else if (assumption != const0_rtx)
        desc->noloop_assumptions =
          alloc_EXPR_LIST (0, assumption, desc->noloop_assumptions);
      delta = simplify_gen_binary (UDIV, mode, delta, step);
      desc->niter_expr = delta;
    }

  old_niter = desc->niter_expr;

  simplify_using_initial_values (loop, AND, &desc->assumptions);
  if (desc->assumptions
      && XEXP (desc->assumptions, 0) == const0_rtx)
    goto fail;
  simplify_using_initial_values (loop, IOR, &desc->noloop_assumptions);
  simplify_using_initial_values (loop, IOR, &desc->infinite);
  simplify_using_initial_values (loop, UNKNOWN, &desc->niter_expr);

  /* Rerun the simplification.  Consider code (created by copying loop headers)
     in which a loop is guarded by a copy of its exit test.
     The first pass determines that i = 0, the second pass uses it to eliminate
     noloop assumption.  */

  simplify_using_initial_values (loop, AND, &desc->assumptions);
  if (desc->assumptions
      && XEXP (desc->assumptions, 0) == const0_rtx)
    goto fail;
  simplify_using_initial_values (loop, IOR, &desc->noloop_assumptions);
  simplify_using_initial_values (loop, IOR, &desc->infinite);
  simplify_using_initial_values (loop, UNKNOWN, &desc->niter_expr);

  if (desc->noloop_assumptions
      && XEXP (desc->noloop_assumptions, 0) == const_true_rtx)
    goto zero_iter;

  if (GET_CODE (desc->niter_expr) == CONST_INT)
    {
      unsigned HOST_WIDEST_INT val = INTVAL (desc->niter_expr);

      desc->const_iter = true;
      desc->niter_max = desc->niter = val & GET_MODE_MASK (desc->mode);
    }
  else
    {
      if (!desc->niter_max)
        desc->niter_max = determine_max_iter (desc);

      /* simplify_using_initial_values does a copy propagation on the registers
         in the expression for the number of iterations.  This prolongs life
         ranges of registers and increases register pressure, and usually
         brings no gain (and if it happens to do, the cse pass will take care
         of it anyway).  So prevent this behavior, unless it enabled us to
         derive that the number of iterations is a constant.  */
      desc->niter_expr = old_niter;
    }

  return;

zero_iter_simplify:
  /* Simplify the assumptions.  */
  simplify_using_initial_values (loop, AND, &desc->assumptions);
  if (desc->assumptions
      && XEXP (desc->assumptions, 0) == const0_rtx)
    goto fail;
  simplify_using_initial_values (loop, IOR, &desc->infinite);

  /* Fallthru.  */
zero_iter:
  desc->const_iter = true;
  desc->niter = 0;
  desc->niter_max = 0;
  desc->noloop_assumptions = NULL_RTX;
  desc->niter_expr = const0_rtx;
  return;

fail:
  desc->simple_p = false;
  return;
}
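
/* A small worked instance of the NE case above (added illustration): for an
   exit test 4 * i != 12 in an 8-bit mode, s = 4, c = 12 and d = 4, so the
   loop can terminate only because 4 divides 12, and the iteration count is
   inverse (1, 6) * (12 / 4) = 3 masked to 6 bits, i.e. 3 -- the exit is
   taken when i reaches 3.  */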
/* Checks whether E is a simple exit from LOOP and stores its description
   into DESC.  */

static void
check_simple_exit (struct loop *loop, edge e, struct niter_desc *desc)
{
  basic_block exit_bb;
  rtx condition, at;
  edge ein;

  exit_bb = e->src;
  desc->simple_p = false;

  /* It must belong directly to the loop.  */
  if (exit_bb->loop_father != loop)
    return;

  /* It must be tested (at least) once during any iteration.  */
  if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit_bb))
    return;

  /* It must end in a simple conditional jump.  */
  if (!any_condjump_p (BB_END (exit_bb)))
    return;

  ein = EDGE_SUCC (exit_bb, 0);
  if (ein == e)
    ein = EDGE_SUCC (exit_bb, 1);

  desc->out_edge = e;
  desc->in_edge = ein;

  /* Test whether the condition is suitable.  */
  if (!(condition = get_condition (BB_END (ein->src), &at, false, false)))
    return;

  if (ein->flags & EDGE_FALLTHRU)
    {
      condition = reversed_condition (condition);
      if (!condition)
        return;
    }

  /* Check that we are able to determine number of iterations and fill
     in information about it.  */
  iv_number_of_iterations (loop, at, condition, desc);
}
/* Finds a simple exit of LOOP and stores its description into DESC.  */

void
find_simple_exit (struct loop *loop, struct niter_desc *desc)
{
  unsigned i;
  basic_block *body;
  edge e;
  struct niter_desc act;
  bool any = false;
  edge_iterator ei;

  desc->simple_p = false;
  body = get_loop_body (loop);

  for (i = 0; i < loop->num_nodes; i++)
    {
      FOR_EACH_EDGE (e, ei, body[i]->succs)
        {
          if (flow_bb_inside_loop_p (loop, e->dest))
            continue;

          check_simple_exit (loop, e, &act);
          if (!act.simple_p)
            continue;

          if (!any)
            any = true;
          else
            {
              /* Prefer constant iterations; the less the better.  */
              if (!act.const_iter
                  || (desc->const_iter && act.niter >= desc->niter))
                continue;

              /* Also if the actual exit may be infinite, while the old one
                 not, prefer the old one.  */
              if (act.infinite && !desc->infinite)
                continue;
            }

          *desc = act;
        }
    }

  if (dump_file)
    {
      if (desc->simple_p)
        {
          fprintf (dump_file, "Loop %d is simple:\n", loop->num);
          fprintf (dump_file, "  simple exit %d -> %d\n",
                   desc->out_edge->src->index,
                   desc->out_edge->dest->index);
          if (desc->assumptions)
            {
              fprintf (dump_file, "  assumptions: ");
              print_rtl (dump_file, desc->assumptions);
              fprintf (dump_file, "\n");
            }
          if (desc->noloop_assumptions)
            {
              fprintf (dump_file, "  does not roll if: ");
              print_rtl (dump_file, desc->noloop_assumptions);
              fprintf (dump_file, "\n");
            }
          if (desc->infinite)
            {
              fprintf (dump_file, "  infinite if: ");
              print_rtl (dump_file, desc->infinite);
              fprintf (dump_file, "\n");
            }

          fprintf (dump_file, "  number of iterations: ");
          print_rtl (dump_file, desc->niter_expr);
          fprintf (dump_file, "\n");

          fprintf (dump_file, "  upper bound: ");
          fprintf (dump_file, HOST_WIDEST_INT_PRINT_DEC, desc->niter_max);
          fprintf (dump_file, "\n");
        }
      else
        fprintf (dump_file, "Loop %d is not simple.\n", loop->num);
    }

  free (body);
}
/* Creates a simple loop description of LOOP if it was not computed
   already.  */

struct niter_desc *
get_simple_loop_desc (struct loop *loop)
{
  struct niter_desc *desc = simple_loop_desc (loop);

  if (desc)
    return desc;

  desc = xmalloc (sizeof (struct niter_desc));
  iv_analysis_loop_init (loop);
  find_simple_exit (loop, desc);
  loop->aux = desc;

  if (desc->simple_p && (desc->assumptions || desc->infinite))
    {
      const char *wording;

      /* Assume that no overflow happens and that the loop is finite.
         We already warned at the tree level if we ran optimizations there.  */
      if (!flag_tree_loop_optimize && warn_unsafe_loop_optimizations)
        {
          if (desc->infinite)
            {
              wording =
                flag_unsafe_loop_optimizations
                ? N_("assuming that the loop is not infinite")
                : N_("cannot optimize possibly infinite loops");
              warning (OPT_Wunsafe_loop_optimizations, "%s",
                       gettext (wording));
            }
          if (desc->assumptions)
            {
              wording =
                flag_unsafe_loop_optimizations
                ? N_("assuming that the loop counter does not overflow")
                : N_("cannot optimize loop, the loop counter may overflow");
              warning (OPT_Wunsafe_loop_optimizations, "%s",
                       gettext (wording));
            }
        }

      if (flag_unsafe_loop_optimizations)
        {
          desc->assumptions = NULL_RTX;
          desc->infinite = NULL_RTX;
        }
    }

  return desc;
}
/* Releases simple loop description for LOOP.  */

void
free_simple_loop_desc (struct loop *loop)
{
  struct niter_desc *desc = simple_loop_desc (loop);

  if (!desc)
    return;

  free (desc);
  loop->aux = NULL;
}