/* Rtl-level induction variable analysis.
   Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* This is a simple analysis of induction variables of the loop.  The major use
   is for determining the number of iterations of a loop for loop unrolling,
   doloop optimization and branch prediction.  The iv information is computed
   on demand.

   Induction variables are analyzed by walking the use-def chains.  When
   a basic induction variable (biv) is found, it is cached in the bivs
   hash table.  When a register is proved to be a biv, its description
   is stored to DF_REF_DATA of the def reference.

   The analysis always works with one loop -- you must call
   iv_analysis_loop_init (loop) for it.  All the other functions then work with
   this loop.  When you need to work with another loop, just call
   iv_analysis_loop_init for it.  When you no longer need iv analysis, call
   iv_analysis_done () to clean up the memory.
   The available functions are:

   iv_analyze (insn, reg, iv): Stores the description of the induction variable
     corresponding to the use of register REG in INSN to IV.  Returns true if
     REG is an induction variable in INSN, false otherwise.
     If the use of REG is not found in INSN, the following insns are scanned
     (so that we may call this function on an insn returned by get_condition).
   iv_analyze_result (insn, def, iv): Stores to IV the description of the iv
     corresponding to DEF, which is a register defined in INSN.
   iv_analyze_expr (insn, rhs, mode, iv): Stores to IV the description of the iv
     corresponding to expression RHS evaluated at INSN.  All registers used by
     RHS must also be used in INSN.  */
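/* A minimal usage sketch (illustrative only; example_analyze_reg is a
   hypothetical caller, not part of this interface): a pass that wants to
   know whether REG behaves as an induction variable at INSN inside LOOP
   could query this module as follows.  */
#if 0
static void
example_analyze_reg (struct loop *loop, rtx insn, rtx reg)
{
  struct rtx_iv iv;

  iv_analysis_loop_init (loop);          /* Select the loop to analyze.  */
  if (iv_analyze (insn, reg, &iv) && dump_file)
    dump_iv_info (dump_file, &iv);       /* REG is an iv; describe it.  */
  iv_analysis_done ();                   /* Release the analysis data.  */
}
#endif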
#include "coretypes.h"
#include "hard-reg-set.h"
#include "basic-block.h"
/* Possible return values of iv_get_reaching_def.  */

enum iv_grd_result
{
  /* More than one reaching def, or reaching def that does not
     dominate the use.  */
  GRD_INVALID,

  /* The use is trivial invariant of the loop, i.e. is not changed
     inside the loop.  */
  GRD_INVARIANT,

  /* The use is reached by initial value and a value from the
     previous iteration.  */
  GRD_MAYBE_BIV,

  /* The use has single dominating def.  */
  GRD_SINGLE_DOM
};
/* Information about a biv.  */

struct biv_entry
{
  unsigned regno;      /* The register of the biv.  */
  struct rtx_iv iv;    /* Value of the biv.  */
};

static bool clean_slate = true;

static unsigned int iv_ref_table_size = 0;
/* Table of rtx_ivs indexed by the df_ref uid field.  */

static struct rtx_iv ** iv_ref_table;

/* Induction variable stored at the reference.  */
#define DF_REF_IV(REF) iv_ref_table[DF_REF_ID(REF)]
#define DF_REF_IV_SET(REF, IV) iv_ref_table[DF_REF_ID(REF)] = (IV)

/* The current loop.  */

static struct loop *current_loop;
/* Bivs of the current loop.  */

static htab_t bivs;

static bool iv_analyze_op (rtx, rtx, struct rtx_iv *);
/* Dumps information about IV to FILE.  */

extern void dump_iv_info (FILE *, struct rtx_iv *);
void
dump_iv_info (FILE *file, struct rtx_iv *iv)
{
  if (!iv->base)
    {
      fprintf (file, "not simple");
      return;
    }

  if (iv->step == const0_rtx
      && !iv->first_special)
    fprintf (file, "invariant ");

  print_rtl (file, iv->base);
  if (iv->step != const0_rtx)
    {
      fprintf (file, " + ");
      print_rtl (file, iv->step);
      fprintf (file, " * iteration");
    }
  fprintf (file, " (in %s)", GET_MODE_NAME (iv->mode));

  if (iv->mode != iv->extend_mode)
    fprintf (file, " %s to %s",
             rtx_name[iv->extend],
             GET_MODE_NAME (iv->extend_mode));

  if (iv->mult != const1_rtx)
    {
      fprintf (file, " * ");
      print_rtl (file, iv->mult);
    }

  if (iv->delta != const0_rtx)
    {
      fprintf (file, " + ");
      print_rtl (file, iv->delta);
    }

  if (iv->first_special)
    fprintf (file, " (first special)");
}
/* Generates a subreg to get the least significant part of EXPR (in mode
   INNER_MODE) to OUTER_MODE.  */

rtx
lowpart_subreg (enum machine_mode outer_mode, rtx expr,
                enum machine_mode inner_mode)
{
  return simplify_gen_subreg (outer_mode, expr, inner_mode,
                              subreg_lowpart_offset (outer_mode, inner_mode));
}
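/* A minimal illustrative sketch (the pseudo-register number 100 and the
   wrapper function are made up): taking the low SImode part of a DImode
   pseudo.  The byte offset of the low part is target-dependent and is
   supplied by subreg_lowpart_offset, so the same call is correct on both
   endiannesses.  */
#if 0
static rtx
example_lowpart (void)
{
  rtx di_reg = gen_rtx_REG (DImode, 100);
  return lowpart_subreg (SImode, di_reg, DImode);  /* (subreg:SI (reg:DI 100) ...)  */
}
#endif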
/* Make sure iv_ref_table is large enough for the current DF defs table.  */

static void
check_iv_ref_table_size (void)
{
  if (iv_ref_table_size < DF_DEFS_TABLE_SIZE ())
    {
      unsigned int new_size = DF_DEFS_TABLE_SIZE () + (DF_DEFS_TABLE_SIZE () / 4);
      iv_ref_table = XRESIZEVEC (struct rtx_iv *, iv_ref_table, new_size);
      memset (&iv_ref_table[iv_ref_table_size], 0,
              (new_size - iv_ref_table_size) * sizeof (struct rtx_iv *));
      iv_ref_table_size = new_size;
    }
}
/* Checks whether REG is a well-behaved register.  */

static bool
simple_reg_p (rtx reg)
{
  unsigned r;

  if (GET_CODE (reg) == SUBREG)
    {
      if (!subreg_lowpart_p (reg))
        return false;
      reg = SUBREG_REG (reg);
    }

  if (!REG_P (reg))
    return false;

  r = REGNO (reg);
  if (HARD_REGISTER_NUM_P (r))
    return false;

  if (GET_MODE_CLASS (GET_MODE (reg)) != MODE_INT)
    return false;

  return true;
}
/* Clears the information about ivs stored in df.  */

static void
clear_iv_info (void)
{
  unsigned i, n_defs = DF_DEFS_TABLE_SIZE ();
  struct rtx_iv *iv;

  check_iv_ref_table_size ();
  for (i = 0; i < n_defs; i++)
    {
      iv = iv_ref_table[i];
      if (iv)
        {
          free (iv);
          iv_ref_table[i] = NULL;
        }
    }

  htab_empty (bivs);
}
/* Returns hash value for biv B.  */

static hashval_t
biv_hash (const void *b)
{
  return ((const struct biv_entry *) b)->regno;
}

/* Compares biv B and register R.  */

static int
biv_eq (const void *b, const void *r)
{
  return ((const struct biv_entry *) b)->regno == REGNO ((const_rtx) r);
}
/* Prepare the data for an induction variable analysis of a LOOP.  */

void
iv_analysis_loop_init (struct loop *loop)
{
  basic_block *body = get_loop_body_in_dom_order (loop), bb;
  bitmap blocks = BITMAP_ALLOC (NULL);
  unsigned i;

  current_loop = loop;

  /* Clear the information from the analysis of the previous loop.  */
  if (clean_slate)
    {
      df_set_flags (DF_EQ_NOTES + DF_DEFER_INSN_RESCAN);
      bivs = htab_create (10, biv_hash, biv_eq, free);
      clean_slate = false;
    }
  else
    clear_iv_info ();

  for (i = 0; i < loop->num_nodes; i++)
    {
      bb = body[i];
      bitmap_set_bit (blocks, bb->index);
    }
  /* Get rid of the ud chains before processing the rescans.  Then add
     the problem back.  */
  df_remove_problem (df_chain);
  df_process_deferred_rescans ();
  df_chain_add_problem (DF_UD_CHAIN);
  df_set_blocks (blocks);
  df_analyze ();
  if (dump_file)
    df_dump_region (dump_file);

  check_iv_ref_table_size ();
  BITMAP_FREE (blocks);
  free (body);
}
/* Finds the definition of REG that dominates loop latch and stores
   it to DEF.  Returns false if there is not a single definition
   dominating the latch.  If REG has no definition in loop, DEF
   is set to NULL and true is returned.  */

static bool
latch_dominating_def (rtx reg, df_ref *def)
{
  df_ref single_rd = NULL, adef;
  unsigned regno = REGNO (reg);
  struct df_rd_bb_info *bb_info = DF_RD_BB_INFO (current_loop->latch);

  for (adef = DF_REG_DEF_CHAIN (regno); adef; adef = DF_REF_NEXT_REG (adef))
    {
      if (!bitmap_bit_p (df->blocks_to_analyze, DF_REF_BBNO (adef))
          || !bitmap_bit_p (bb_info->out, DF_REF_ID (adef)))
        continue;

      /* More than one reaching definition.  */
      if (single_rd)
        return false;

      if (!just_once_each_iteration_p (current_loop, DF_REF_BB (adef)))
        return false;

      single_rd = adef;
    }

  *def = single_rd;
  return true;
}
/* Gets definition of REG reaching its use in INSN and stores it to DEF.  */

static enum iv_grd_result
iv_get_reaching_def (rtx insn, rtx reg, df_ref *def)
{
  df_ref use, adef;
  basic_block def_bb, use_bb;
  rtx def_insn;
  bool dom_p;

  *def = NULL;
  if (!simple_reg_p (reg))
    return GRD_INVALID;
  if (GET_CODE (reg) == SUBREG)
    reg = SUBREG_REG (reg);
  gcc_assert (REG_P (reg));

  use = df_find_use (insn, reg);
  gcc_assert (use != NULL);

  if (!DF_REF_CHAIN (use))
    return GRD_INVARIANT;

  /* More than one reaching def.  */
  if (DF_REF_CHAIN (use)->next)
    return GRD_INVALID;

  adef = DF_REF_CHAIN (use)->ref;

  /* We do not handle setting only part of the register.  */
  if (DF_REF_FLAGS (adef) & DF_REF_READ_WRITE)
    return GRD_INVALID;

  def_insn = DF_REF_INSN (adef);
  def_bb = DF_REF_BB (adef);
  use_bb = BLOCK_FOR_INSN (insn);

  if (use_bb == def_bb)
    dom_p = (DF_INSN_LUID (def_insn) < DF_INSN_LUID (insn));
  else
    dom_p = dominated_by_p (CDI_DOMINATORS, use_bb, def_bb);

  if (dom_p)
    {
      *def = adef;
      return GRD_SINGLE_DOM;
    }

  /* The definition does not dominate the use.  This is still OK if
     this may be a use of a biv, i.e. if the def_bb dominates loop
     latch.  */
  if (just_once_each_iteration_p (current_loop, def_bb))
    return GRD_MAYBE_BIV;

  return GRD_INVALID;
}
380 /* Sets IV to invariant CST in MODE. Always returns true (just for
381 consistency with other iv manipulation functions that may fail). */
384 iv_constant (struct rtx_iv
*iv
, rtx cst
, enum machine_mode mode
)
386 if (mode
== VOIDmode
)
387 mode
= GET_MODE (cst
);
391 iv
->step
= const0_rtx
;
392 iv
->first_special
= false;
393 iv
->extend
= UNKNOWN
;
394 iv
->extend_mode
= iv
->mode
;
395 iv
->delta
= const0_rtx
;
396 iv
->mult
= const1_rtx
;
401 /* Evaluates application of subreg to MODE on IV. */
404 iv_subreg (struct rtx_iv
*iv
, enum machine_mode mode
)
406 /* If iv is invariant, just calculate the new value. */
407 if (iv
->step
== const0_rtx
408 && !iv
->first_special
)
410 rtx val
= get_iv_value (iv
, const0_rtx
);
411 val
= lowpart_subreg (mode
, val
, iv
->extend_mode
);
414 iv
->extend
= UNKNOWN
;
415 iv
->mode
= iv
->extend_mode
= mode
;
416 iv
->delta
= const0_rtx
;
417 iv
->mult
= const1_rtx
;
421 if (iv
->extend_mode
== mode
)
424 if (GET_MODE_BITSIZE (mode
) > GET_MODE_BITSIZE (iv
->mode
))
427 iv
->extend
= UNKNOWN
;
430 iv
->base
= simplify_gen_binary (PLUS
, iv
->extend_mode
, iv
->delta
,
431 simplify_gen_binary (MULT
, iv
->extend_mode
,
432 iv
->base
, iv
->mult
));
433 iv
->step
= simplify_gen_binary (MULT
, iv
->extend_mode
, iv
->step
, iv
->mult
);
434 iv
->mult
= const1_rtx
;
435 iv
->delta
= const0_rtx
;
436 iv
->first_special
= false;
441 /* Evaluates application of EXTEND to MODE on IV. */
444 iv_extend (struct rtx_iv
*iv
, enum rtx_code extend
, enum machine_mode mode
)
446 /* If iv is invariant, just calculate the new value. */
447 if (iv
->step
== const0_rtx
448 && !iv
->first_special
)
450 rtx val
= get_iv_value (iv
, const0_rtx
);
451 val
= simplify_gen_unary (extend
, mode
, val
, iv
->extend_mode
);
454 iv
->extend
= UNKNOWN
;
455 iv
->mode
= iv
->extend_mode
= mode
;
456 iv
->delta
= const0_rtx
;
457 iv
->mult
= const1_rtx
;
461 if (mode
!= iv
->extend_mode
)
464 if (iv
->extend
!= UNKNOWN
465 && iv
->extend
!= extend
)
473 /* Evaluates negation of IV. */
476 iv_neg (struct rtx_iv
*iv
)
478 if (iv
->extend
== UNKNOWN
)
480 iv
->base
= simplify_gen_unary (NEG
, iv
->extend_mode
,
481 iv
->base
, iv
->extend_mode
);
482 iv
->step
= simplify_gen_unary (NEG
, iv
->extend_mode
,
483 iv
->step
, iv
->extend_mode
);
487 iv
->delta
= simplify_gen_unary (NEG
, iv
->extend_mode
,
488 iv
->delta
, iv
->extend_mode
);
489 iv
->mult
= simplify_gen_unary (NEG
, iv
->extend_mode
,
490 iv
->mult
, iv
->extend_mode
);
496 /* Evaluates addition or subtraction (according to OP) of IV1 to IV0. */
499 iv_add (struct rtx_iv
*iv0
, struct rtx_iv
*iv1
, enum rtx_code op
)
501 enum machine_mode mode
;
504 /* Extend the constant to extend_mode of the other operand if necessary. */
505 if (iv0
->extend
== UNKNOWN
506 && iv0
->mode
== iv0
->extend_mode
507 && iv0
->step
== const0_rtx
508 && GET_MODE_SIZE (iv0
->extend_mode
) < GET_MODE_SIZE (iv1
->extend_mode
))
510 iv0
->extend_mode
= iv1
->extend_mode
;
511 iv0
->base
= simplify_gen_unary (ZERO_EXTEND
, iv0
->extend_mode
,
512 iv0
->base
, iv0
->mode
);
514 if (iv1
->extend
== UNKNOWN
515 && iv1
->mode
== iv1
->extend_mode
516 && iv1
->step
== const0_rtx
517 && GET_MODE_SIZE (iv1
->extend_mode
) < GET_MODE_SIZE (iv0
->extend_mode
))
519 iv1
->extend_mode
= iv0
->extend_mode
;
520 iv1
->base
= simplify_gen_unary (ZERO_EXTEND
, iv1
->extend_mode
,
521 iv1
->base
, iv1
->mode
);
524 mode
= iv0
->extend_mode
;
525 if (mode
!= iv1
->extend_mode
)
528 if (iv0
->extend
== UNKNOWN
&& iv1
->extend
== UNKNOWN
)
530 if (iv0
->mode
!= iv1
->mode
)
533 iv0
->base
= simplify_gen_binary (op
, mode
, iv0
->base
, iv1
->base
);
534 iv0
->step
= simplify_gen_binary (op
, mode
, iv0
->step
, iv1
->step
);
539 /* Handle addition of constant. */
540 if (iv1
->extend
== UNKNOWN
542 && iv1
->step
== const0_rtx
)
544 iv0
->delta
= simplify_gen_binary (op
, mode
, iv0
->delta
, iv1
->base
);
548 if (iv0
->extend
== UNKNOWN
550 && iv0
->step
== const0_rtx
)
558 iv0
->delta
= simplify_gen_binary (PLUS
, mode
, iv0
->delta
, arg
);
565 /* Evaluates multiplication of IV by constant CST. */
568 iv_mult (struct rtx_iv
*iv
, rtx mby
)
570 enum machine_mode mode
= iv
->extend_mode
;
572 if (GET_MODE (mby
) != VOIDmode
573 && GET_MODE (mby
) != mode
)
576 if (iv
->extend
== UNKNOWN
)
578 iv
->base
= simplify_gen_binary (MULT
, mode
, iv
->base
, mby
);
579 iv
->step
= simplify_gen_binary (MULT
, mode
, iv
->step
, mby
);
583 iv
->delta
= simplify_gen_binary (MULT
, mode
, iv
->delta
, mby
);
584 iv
->mult
= simplify_gen_binary (MULT
, mode
, iv
->mult
, mby
);
590 /* Evaluates shift of IV by constant CST. */
593 iv_shift (struct rtx_iv
*iv
, rtx mby
)
595 enum machine_mode mode
= iv
->extend_mode
;
597 if (GET_MODE (mby
) != VOIDmode
598 && GET_MODE (mby
) != mode
)
601 if (iv
->extend
== UNKNOWN
)
603 iv
->base
= simplify_gen_binary (ASHIFT
, mode
, iv
->base
, mby
);
604 iv
->step
= simplify_gen_binary (ASHIFT
, mode
, iv
->step
, mby
);
608 iv
->delta
= simplify_gen_binary (ASHIFT
, mode
, iv
->delta
, mby
);
609 iv
->mult
= simplify_gen_binary (ASHIFT
, mode
, iv
->mult
, mby
);
/* The recursive part of get_biv_step.  Gets the value of the single value
   defined by DEF with respect to the initial value of REG inside the loop,
   in the shape described at get_biv_step.  */
620 get_biv_step_1 (df_ref def
, rtx reg
,
621 rtx
*inner_step
, enum machine_mode
*inner_mode
,
622 enum rtx_code
*extend
, enum machine_mode outer_mode
,
625 rtx set
, rhs
, op0
= NULL_RTX
, op1
= NULL_RTX
;
626 rtx next
, nextr
, tmp
;
628 rtx insn
= DF_REF_INSN (def
);
630 enum iv_grd_result res
;
632 set
= single_set (insn
);
636 rhs
= find_reg_equal_equiv_note (insn
);
642 code
= GET_CODE (rhs
);
655 if (code
== PLUS
&& CONSTANT_P (op0
))
657 tmp
= op0
; op0
= op1
; op1
= tmp
;
660 if (!simple_reg_p (op0
)
661 || !CONSTANT_P (op1
))
664 if (GET_MODE (rhs
) != outer_mode
)
666 /* ppc64 uses expressions like
668 (set x:SI (plus:SI (subreg:SI y:DI) 1)).
670 this is equivalent to
672 (set x':DI (plus:DI y:DI 1))
673 (set x:SI (subreg:SI (x':DI)). */
674 if (GET_CODE (op0
) != SUBREG
)
676 if (GET_MODE (SUBREG_REG (op0
)) != outer_mode
)
685 if (GET_MODE (rhs
) != outer_mode
)
689 if (!simple_reg_p (op0
))
699 if (GET_CODE (next
) == SUBREG
)
701 if (!subreg_lowpart_p (next
))
704 nextr
= SUBREG_REG (next
);
705 if (GET_MODE (nextr
) != outer_mode
)
711 res
= iv_get_reaching_def (insn
, nextr
, &next_def
);
713 if (res
== GRD_INVALID
|| res
== GRD_INVARIANT
)
716 if (res
== GRD_MAYBE_BIV
)
718 if (!rtx_equal_p (nextr
, reg
))
721 *inner_step
= const0_rtx
;
723 *inner_mode
= outer_mode
;
724 *outer_step
= const0_rtx
;
726 else if (!get_biv_step_1 (next_def
, reg
,
727 inner_step
, inner_mode
, extend
, outer_mode
,
731 if (GET_CODE (next
) == SUBREG
)
733 enum machine_mode amode
= GET_MODE (next
);
735 if (GET_MODE_SIZE (amode
) > GET_MODE_SIZE (*inner_mode
))
739 *inner_step
= simplify_gen_binary (PLUS
, outer_mode
,
740 *inner_step
, *outer_step
);
741 *outer_step
= const0_rtx
;
753 if (*inner_mode
== outer_mode
754 /* See comment in previous switch. */
755 || GET_MODE (rhs
) != outer_mode
)
756 *inner_step
= simplify_gen_binary (code
, outer_mode
,
759 *outer_step
= simplify_gen_binary (code
, outer_mode
,
765 gcc_assert (GET_MODE (op0
) == *inner_mode
766 && *extend
== UNKNOWN
767 && *outer_step
== const0_rtx
);
779 /* Gets the operation on register REG inside loop, in shape
781 OUTER_STEP + EXTEND_{OUTER_MODE} (SUBREG_{INNER_MODE} (REG + INNER_STEP))
783 If the operation cannot be described in this shape, return false.
784 LAST_DEF is the definition of REG that dominates loop latch. */
787 get_biv_step (df_ref last_def
, rtx reg
, rtx
*inner_step
,
788 enum machine_mode
*inner_mode
, enum rtx_code
*extend
,
789 enum machine_mode
*outer_mode
, rtx
*outer_step
)
791 *outer_mode
= GET_MODE (reg
);
793 if (!get_biv_step_1 (last_def
, reg
,
794 inner_step
, inner_mode
, extend
, *outer_mode
,
798 gcc_assert ((*inner_mode
== *outer_mode
) != (*extend
!= UNKNOWN
));
799 gcc_assert (*inner_mode
!= *outer_mode
|| *outer_step
== const0_rtx
);
804 /* Records information that DEF is induction variable IV. */
807 record_iv (df_ref def
, struct rtx_iv
*iv
)
809 struct rtx_iv
*recorded_iv
= XNEW (struct rtx_iv
);
812 check_iv_ref_table_size ();
813 DF_REF_IV_SET (def
, recorded_iv
);
816 /* If DEF was already analyzed for bivness, store the description of the biv to
817 IV and return true. Otherwise return false. */
820 analyzed_for_bivness_p (rtx def
, struct rtx_iv
*iv
)
822 struct biv_entry
*biv
=
823 (struct biv_entry
*) htab_find_with_hash (bivs
, def
, REGNO (def
));
833 record_biv (rtx def
, struct rtx_iv
*iv
)
835 struct biv_entry
*biv
= XNEW (struct biv_entry
);
836 void **slot
= htab_find_slot_with_hash (bivs
, def
, REGNO (def
), INSERT
);
838 biv
->regno
= REGNO (def
);
844 /* Determines whether DEF is a biv and if so, stores its description
848 iv_analyze_biv (rtx def
, struct rtx_iv
*iv
)
850 rtx inner_step
, outer_step
;
851 enum machine_mode inner_mode
, outer_mode
;
852 enum rtx_code extend
;
857 fprintf (dump_file
, "Analyzing ");
858 print_rtl (dump_file
, def
);
859 fprintf (dump_file
, " for bivness.\n");
864 if (!CONSTANT_P (def
))
867 return iv_constant (iv
, def
, VOIDmode
);
870 if (!latch_dominating_def (def
, &last_def
))
873 fprintf (dump_file
, " not simple.\n");
878 return iv_constant (iv
, def
, VOIDmode
);
880 if (analyzed_for_bivness_p (def
, iv
))
883 fprintf (dump_file
, " already analysed.\n");
884 return iv
->base
!= NULL_RTX
;
887 if (!get_biv_step (last_def
, def
, &inner_step
, &inner_mode
, &extend
,
888 &outer_mode
, &outer_step
))
894 /* Loop transforms base to es (base + inner_step) + outer_step,
895 where es means extend of subreg between inner_mode and outer_mode.
896 The corresponding induction variable is
898 es ((base - outer_step) + i * (inner_step + outer_step)) + outer_step */
900 iv
->base
= simplify_gen_binary (MINUS
, outer_mode
, def
, outer_step
);
901 iv
->step
= simplify_gen_binary (PLUS
, outer_mode
, inner_step
, outer_step
);
902 iv
->mode
= inner_mode
;
903 iv
->extend_mode
= outer_mode
;
905 iv
->mult
= const1_rtx
;
906 iv
->delta
= outer_step
;
907 iv
->first_special
= inner_mode
!= outer_mode
;
912 fprintf (dump_file
, " ");
913 dump_iv_info (dump_file
, iv
);
914 fprintf (dump_file
, "\n");
917 record_biv (def
, iv
);
918 return iv
->base
!= NULL_RTX
;
921 /* Analyzes expression RHS used at INSN and stores the result to *IV.
922 The mode of the induction variable is MODE. */
925 iv_analyze_expr (rtx insn
, rtx rhs
, enum machine_mode mode
, struct rtx_iv
*iv
)
927 rtx mby
= NULL_RTX
, tmp
;
928 rtx op0
= NULL_RTX
, op1
= NULL_RTX
;
929 struct rtx_iv iv0
, iv1
;
930 enum rtx_code code
= GET_CODE (rhs
);
931 enum machine_mode omode
= mode
;
937 gcc_assert (GET_MODE (rhs
) == mode
|| GET_MODE (rhs
) == VOIDmode
);
943 if (!iv_analyze_op (insn
, rhs
, iv
))
946 if (iv
->mode
== VOIDmode
)
949 iv
->extend_mode
= mode
;
965 omode
= GET_MODE (op0
);
977 if (!CONSTANT_P (mby
))
983 if (!CONSTANT_P (mby
))
990 if (!CONSTANT_P (mby
))
999 && !iv_analyze_expr (insn
, op0
, omode
, &iv0
))
1003 && !iv_analyze_expr (insn
, op1
, omode
, &iv1
))
1010 if (!iv_extend (&iv0
, code
, mode
))
1021 if (!iv_add (&iv0
, &iv1
, code
))
1026 if (!iv_mult (&iv0
, mby
))
1031 if (!iv_shift (&iv0
, mby
))
1040 return iv
->base
!= NULL_RTX
;
1043 /* Analyzes iv DEF and stores the result to *IV. */
1046 iv_analyze_def (df_ref def
, struct rtx_iv
*iv
)
1048 rtx insn
= DF_REF_INSN (def
);
1049 rtx reg
= DF_REF_REG (def
);
1054 fprintf (dump_file
, "Analyzing def of ");
1055 print_rtl (dump_file
, reg
);
1056 fprintf (dump_file
, " in insn ");
1057 print_rtl_single (dump_file
, insn
);
1060 check_iv_ref_table_size ();
1061 if (DF_REF_IV (def
))
1064 fprintf (dump_file
, " already analysed.\n");
1065 *iv
= *DF_REF_IV (def
);
1066 return iv
->base
!= NULL_RTX
;
1069 iv
->mode
= VOIDmode
;
1070 iv
->base
= NULL_RTX
;
1071 iv
->step
= NULL_RTX
;
1076 set
= single_set (insn
);
1080 if (!REG_P (SET_DEST (set
)))
1083 gcc_assert (SET_DEST (set
) == reg
);
1084 rhs
= find_reg_equal_equiv_note (insn
);
1086 rhs
= XEXP (rhs
, 0);
1088 rhs
= SET_SRC (set
);
1090 iv_analyze_expr (insn
, rhs
, GET_MODE (reg
), iv
);
1091 record_iv (def
, iv
);
1095 print_rtl (dump_file
, reg
);
1096 fprintf (dump_file
, " in insn ");
1097 print_rtl_single (dump_file
, insn
);
1098 fprintf (dump_file
, " is ");
1099 dump_iv_info (dump_file
, iv
);
1100 fprintf (dump_file
, "\n");
1103 return iv
->base
!= NULL_RTX
;
1106 /* Analyzes operand OP of INSN and stores the result to *IV. */
1109 iv_analyze_op (rtx insn
, rtx op
, struct rtx_iv
*iv
)
1112 enum iv_grd_result res
;
1116 fprintf (dump_file
, "Analyzing operand ");
1117 print_rtl (dump_file
, op
);
1118 fprintf (dump_file
, " of insn ");
1119 print_rtl_single (dump_file
, insn
);
1122 if (CONSTANT_P (op
))
1123 res
= GRD_INVARIANT
;
1124 else if (GET_CODE (op
) == SUBREG
)
1126 if (!subreg_lowpart_p (op
))
1129 if (!iv_analyze_op (insn
, SUBREG_REG (op
), iv
))
1132 return iv_subreg (iv
, GET_MODE (op
));
1136 res
= iv_get_reaching_def (insn
, op
, &def
);
1137 if (res
== GRD_INVALID
)
1140 fprintf (dump_file
, " not simple.\n");
1145 if (res
== GRD_INVARIANT
)
1147 iv_constant (iv
, op
, VOIDmode
);
1151 fprintf (dump_file
, " ");
1152 dump_iv_info (dump_file
, iv
);
1153 fprintf (dump_file
, "\n");
1158 if (res
== GRD_MAYBE_BIV
)
1159 return iv_analyze_biv (op
, iv
);
1161 return iv_analyze_def (def
, iv
);
1164 /* Analyzes value VAL at INSN and stores the result to *IV. */
1167 iv_analyze (rtx insn
, rtx val
, struct rtx_iv
*iv
)
  /* We must find the insn in which VAL is used, so that we get to UD chains.
     Since the function is sometimes called on the result of get_condition,
     this does not necessarily have to be directly INSN; scan also the
     following insns.  */
1175 if (simple_reg_p (val
))
1177 if (GET_CODE (val
) == SUBREG
)
1178 reg
= SUBREG_REG (val
);
1182 while (!df_find_use (insn
, reg
))
1183 insn
= NEXT_INSN (insn
);
1186 return iv_analyze_op (insn
, val
, iv
);
1189 /* Analyzes definition of DEF in INSN and stores the result to IV. */
1192 iv_analyze_result (rtx insn
, rtx def
, struct rtx_iv
*iv
)
1196 adef
= df_find_def (insn
, def
);
1200 return iv_analyze_def (adef
, iv
);
1203 /* Checks whether definition of register REG in INSN is a basic induction
1204 variable. IV analysis must have been initialized (via a call to
1205 iv_analysis_loop_init) for this function to produce a result. */
1208 biv_p (rtx insn
, rtx reg
)
1211 df_ref def
, last_def
;
1213 if (!simple_reg_p (reg
))
1216 def
= df_find_def (insn
, reg
);
1217 gcc_assert (def
!= NULL
);
1218 if (!latch_dominating_def (reg
, &last_def
))
1220 if (last_def
!= def
)
1223 if (!iv_analyze_biv (reg
, &iv
))
1226 return iv
.step
!= const0_rtx
;
1229 /* Calculates value of IV at ITERATION-th iteration. */
1232 get_iv_value (struct rtx_iv
*iv
, rtx iteration
)
1236 /* We would need to generate some if_then_else patterns, and so far
1237 it is not needed anywhere. */
1238 gcc_assert (!iv
->first_special
);
1240 if (iv
->step
!= const0_rtx
&& iteration
!= const0_rtx
)
1241 val
= simplify_gen_binary (PLUS
, iv
->extend_mode
, iv
->base
,
1242 simplify_gen_binary (MULT
, iv
->extend_mode
,
1243 iv
->step
, iteration
));
1247 if (iv
->extend_mode
== iv
->mode
)
1250 val
= lowpart_subreg (iv
->mode
, val
, iv
->extend_mode
);
1252 if (iv
->extend
== UNKNOWN
)
1255 val
= simplify_gen_unary (iv
->extend
, iv
->extend_mode
, val
, iv
->mode
);
1256 val
= simplify_gen_binary (PLUS
, iv
->extend_mode
, iv
->delta
,
1257 simplify_gen_binary (MULT
, iv
->extend_mode
,
1263 /* Free the data for an induction variable analysis. */
1266 iv_analysis_done (void)
1272 df_finish_pass (true);
1274 free (iv_ref_table
);
1275 iv_ref_table
= NULL
;
1276 iv_ref_table_size
= 0;
/* Computes inverse to X modulo (1 << MOD).  */

static unsigned HOST_WIDEST_INT
inverse (unsigned HOST_WIDEST_INT x, int mod)
{
  unsigned HOST_WIDEST_INT mask =
          ((unsigned HOST_WIDEST_INT) 1 << (mod - 1) << 1) - 1;
  unsigned HOST_WIDEST_INT rslt = 1;
  int i;

  for (i = 0; i < mod - 1; i++)
    {
      rslt = (rslt * x) & mask;
      x = (x * x) & mask;
    }

  return rslt;
}
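/* An illustrative check (assumes X odd, as the caller in
   iv_number_of_iterations guarantees): the result is the multiplicative
   inverse of X modulo 2^MOD.  For example inverse (3, 4) == 11, and
   indeed 3 * 11 == 33 == 1 (mod 16).  */
#if 0
gcc_assert (inverse (3, 4) == 11);
#endif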
1300 /* Checks whether register *REG is in set ALT. Callback for for_each_rtx. */
1303 altered_reg_used (rtx
*reg
, void *alt
)
1308 return REGNO_REG_SET_P ((bitmap
) alt
, REGNO (*reg
));
1311 /* Marks registers altered by EXPR in set ALT. */
1314 mark_altered (rtx expr
, const_rtx by ATTRIBUTE_UNUSED
, void *alt
)
1316 if (GET_CODE (expr
) == SUBREG
)
1317 expr
= SUBREG_REG (expr
);
1321 SET_REGNO_REG_SET ((bitmap
) alt
, REGNO (expr
));
1324 /* Checks whether RHS is simple enough to process. */
1327 simple_rhs_p (rtx rhs
)
1331 if (CONSTANT_P (rhs
)
1332 || (REG_P (rhs
) && !HARD_REGISTER_P (rhs
)))
1335 switch (GET_CODE (rhs
))
1340 op0
= XEXP (rhs
, 0);
1341 op1
= XEXP (rhs
, 1);
1342 /* Allow reg OP const and reg OP reg. */
1343 if (!(REG_P (op0
) && !HARD_REGISTER_P (op0
))
1344 && !function_invariant_p (op0
))
1346 if (!(REG_P (op1
) && !HARD_REGISTER_P (op1
))
1347 && !function_invariant_p (op1
))
1356 op0
= XEXP (rhs
, 0);
1357 op1
= XEXP (rhs
, 1);
1358 /* Allow reg OP const. */
1359 if (!(REG_P (op0
) && !HARD_REGISTER_P (op0
)))
1361 if (!function_invariant_p (op1
))
1371 /* If REG has a single definition, replace it with its known value in EXPR.
1372 Callback for for_each_rtx. */
1375 replace_single_def_regs (rtx
*reg
, void *expr1
)
1380 rtx
*expr
= (rtx
*)expr1
;
1385 regno
= REGNO (*reg
);
1389 adef
= DF_REG_DEF_CHAIN (regno
);
1390 if (adef
== NULL
|| DF_REF_NEXT_REG (adef
) != NULL
1391 || DF_REF_IS_ARTIFICIAL (adef
))
1394 set
= single_set (DF_REF_INSN (adef
));
1395 if (set
== NULL
|| !REG_P (SET_DEST (set
))
1396 || REGNO (SET_DEST (set
)) != regno
)
1399 note
= find_reg_equal_equiv_note (DF_REF_INSN (adef
));
1401 if (note
&& function_invariant_p (XEXP (note
, 0)))
1403 src
= XEXP (note
, 0);
1406 src
= SET_SRC (set
);
1410 regno
= REGNO (src
);
1415 if (!function_invariant_p (src
))
1418 *expr
= simplify_replace_rtx (*expr
, *reg
, src
);
1422 /* A subroutine of simplify_using_initial_values, this function examines INSN
1423 to see if it contains a suitable set that we can use to make a replacement.
1424 If it is suitable, return true and set DEST and SRC to the lhs and rhs of
1425 the set; return false otherwise. */
1428 suitable_set_for_replacement (rtx insn
, rtx
*dest
, rtx
*src
)
1430 rtx set
= single_set (insn
);
1431 rtx lhs
= NULL_RTX
, rhs
;
1436 lhs
= SET_DEST (set
);
1440 rhs
= find_reg_equal_equiv_note (insn
);
1442 rhs
= XEXP (rhs
, 0);
1444 rhs
= SET_SRC (set
);
1446 if (!simple_rhs_p (rhs
))
1454 /* Using the data returned by suitable_set_for_replacement, replace DEST
1455 with SRC in *EXPR and return the new expression. Also call
1456 replace_single_def_regs if the replacement changed something. */
1458 replace_in_expr (rtx
*expr
, rtx dest
, rtx src
)
1461 *expr
= simplify_replace_rtx (*expr
, dest
, src
);
1464 while (for_each_rtx (expr
, replace_single_def_regs
, expr
) != 0)
1468 /* Checks whether A implies B. */
1471 implies_p (rtx a
, rtx b
)
1473 rtx op0
, op1
, opb0
, opb1
, r
;
1474 enum machine_mode mode
;
1476 if (GET_CODE (a
) == EQ
)
1483 r
= simplify_replace_rtx (b
, op0
, op1
);
1484 if (r
== const_true_rtx
)
1490 r
= simplify_replace_rtx (b
, op1
, op0
);
1491 if (r
== const_true_rtx
)
1496 if (b
== const_true_rtx
)
1499 if ((GET_RTX_CLASS (GET_CODE (a
)) != RTX_COMM_COMPARE
1500 && GET_RTX_CLASS (GET_CODE (a
)) != RTX_COMPARE
)
1501 || (GET_RTX_CLASS (GET_CODE (b
)) != RTX_COMM_COMPARE
1502 && GET_RTX_CLASS (GET_CODE (b
)) != RTX_COMPARE
))
1510 mode
= GET_MODE (op0
);
1511 if (mode
!= GET_MODE (opb0
))
1513 else if (mode
== VOIDmode
)
1515 mode
= GET_MODE (op1
);
1516 if (mode
!= GET_MODE (opb1
))
1520 /* A < B implies A + 1 <= B. */
1521 if ((GET_CODE (a
) == GT
|| GET_CODE (a
) == LT
)
1522 && (GET_CODE (b
) == GE
|| GET_CODE (b
) == LE
))
1525 if (GET_CODE (a
) == GT
)
1532 if (GET_CODE (b
) == GE
)
1539 if (SCALAR_INT_MODE_P (mode
)
1540 && rtx_equal_p (op1
, opb1
)
1541 && simplify_gen_binary (MINUS
, mode
, opb0
, op0
) == const1_rtx
)
1546 /* A < B or A > B imply A != B. TODO: Likewise
1547 A + n < B implies A != B + n if neither wraps. */
1548 if (GET_CODE (b
) == NE
1549 && (GET_CODE (a
) == GT
|| GET_CODE (a
) == GTU
1550 || GET_CODE (a
) == LT
|| GET_CODE (a
) == LTU
))
1552 if (rtx_equal_p (op0
, opb0
)
1553 && rtx_equal_p (op1
, opb1
))
1557 /* For unsigned comparisons, A != 0 implies A > 0 and A >= 1. */
1558 if (GET_CODE (a
) == NE
1559 && op1
== const0_rtx
)
1561 if ((GET_CODE (b
) == GTU
1562 && opb1
== const0_rtx
)
1563 || (GET_CODE (b
) == GEU
1564 && opb1
== const1_rtx
))
1565 return rtx_equal_p (op0
, opb0
);
1568 /* A != N is equivalent to A - (N + 1) <u -1. */
1569 if (GET_CODE (a
) == NE
1570 && CONST_INT_P (op1
)
1571 && GET_CODE (b
) == LTU
1572 && opb1
== constm1_rtx
1573 && GET_CODE (opb0
) == PLUS
1574 && CONST_INT_P (XEXP (opb0
, 1))
1575 /* Avoid overflows. */
1576 && ((unsigned HOST_WIDE_INT
) INTVAL (XEXP (opb0
, 1))
1577 != ((unsigned HOST_WIDE_INT
)1
1578 << (HOST_BITS_PER_WIDE_INT
- 1)) - 1)
1579 && INTVAL (XEXP (opb0
, 1)) + 1 == -INTVAL (op1
))
1580 return rtx_equal_p (op0
, XEXP (opb0
, 0));
1582 /* Likewise, A != N implies A - N > 0. */
1583 if (GET_CODE (a
) == NE
1584 && CONST_INT_P (op1
))
1586 if (GET_CODE (b
) == GTU
1587 && GET_CODE (opb0
) == PLUS
1588 && opb1
== const0_rtx
1589 && CONST_INT_P (XEXP (opb0
, 1))
1590 /* Avoid overflows. */
1591 && ((unsigned HOST_WIDE_INT
) INTVAL (XEXP (opb0
, 1))
1592 != ((unsigned HOST_WIDE_INT
) 1 << (HOST_BITS_PER_WIDE_INT
- 1)))
1593 && rtx_equal_p (XEXP (opb0
, 0), op0
))
1594 return INTVAL (op1
) == -INTVAL (XEXP (opb0
, 1));
1595 if (GET_CODE (b
) == GEU
1596 && GET_CODE (opb0
) == PLUS
1597 && opb1
== const1_rtx
1598 && CONST_INT_P (XEXP (opb0
, 1))
1599 /* Avoid overflows. */
1600 && ((unsigned HOST_WIDE_INT
) INTVAL (XEXP (opb0
, 1))
1601 != ((unsigned HOST_WIDE_INT
) 1 << (HOST_BITS_PER_WIDE_INT
- 1)))
1602 && rtx_equal_p (XEXP (opb0
, 0), op0
))
1603 return INTVAL (op1
) == -INTVAL (XEXP (opb0
, 1));
1606 /* A >s X, where X is positive, implies A <u Y, if Y is negative. */
1607 if ((GET_CODE (a
) == GT
|| GET_CODE (a
) == GE
)
1608 && CONST_INT_P (op1
)
1609 && ((GET_CODE (a
) == GT
&& op1
== constm1_rtx
)
1610 || INTVAL (op1
) >= 0)
1611 && GET_CODE (b
) == LTU
1612 && CONST_INT_P (opb1
)
1613 && rtx_equal_p (op0
, opb0
))
1614 return INTVAL (opb1
) < 0;
/* Canonicalizes COND so that

   (1) Operands are ordered according to swap_commutative_operands_p.

   (2) (LE x const) is replaced with (LT x <const+1>) and similarly
       for GE, GEU, and LEU.  */
1627 canon_condition (rtx cond
)
1632 enum machine_mode mode
;
1634 code
= GET_CODE (cond
);
1635 op0
= XEXP (cond
, 0);
1636 op1
= XEXP (cond
, 1);
1638 if (swap_commutative_operands_p (op0
, op1
))
1640 code
= swap_condition (code
);
1646 mode
= GET_MODE (op0
);
1647 if (mode
== VOIDmode
)
1648 mode
= GET_MODE (op1
);
1649 gcc_assert (mode
!= VOIDmode
);
1651 if (CONST_INT_P (op1
)
1652 && GET_MODE_CLASS (mode
) != MODE_CC
1653 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
)
1655 HOST_WIDE_INT const_val
= INTVAL (op1
);
1656 unsigned HOST_WIDE_INT uconst_val
= const_val
;
1657 unsigned HOST_WIDE_INT max_val
1658 = (unsigned HOST_WIDE_INT
) GET_MODE_MASK (mode
);
1663 if ((unsigned HOST_WIDE_INT
) const_val
!= max_val
>> 1)
1664 code
= LT
, op1
= gen_int_mode (const_val
+ 1, GET_MODE (op0
));
1667 /* When cross-compiling, const_val might be sign-extended from
1668 BITS_PER_WORD to HOST_BITS_PER_WIDE_INT */
1670 if ((HOST_WIDE_INT
) (const_val
& max_val
)
1671 != (((HOST_WIDE_INT
) 1
1672 << (GET_MODE_BITSIZE (GET_MODE (op0
)) - 1))))
1673 code
= GT
, op1
= gen_int_mode (const_val
- 1, mode
);
1677 if (uconst_val
< max_val
)
1678 code
= LTU
, op1
= gen_int_mode (uconst_val
+ 1, mode
);
1682 if (uconst_val
!= 0)
1683 code
= GTU
, op1
= gen_int_mode (uconst_val
- 1, mode
);
  if (op0 != XEXP (cond, 0)
      || op1 != XEXP (cond, 1)
      || code != GET_CODE (cond)
      || GET_MODE (cond) != SImode)
    cond = gen_rtx_fmt_ee (code, SImode, op0, op1);

  return cond;
}
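/* A minimal illustrative sketch (the pseudo-register number 100 and the
   wrapper function are made up): canonicalization turns a non-strict
   comparison against a constant into the strict form, e.g.
   (le (reg:SI 100) (const_int 4)) becomes (lt (reg:SI 100) (const_int 5)),
   and (geu x (const_int 1)) becomes (gtu x (const_int 0)).  */
#if 0
static rtx
example_canon (void)
{
  rtx reg = gen_rtx_REG (SImode, 100);
  rtx cond = gen_rtx_LE (SImode, reg, GEN_INT (4));
  return canon_condition (cond);   /* (lt (reg:SI 100) (const_int 5))  */
}
#endif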
1700 /* Tries to use the fact that COND holds to simplify EXPR. ALTERED is the
1701 set of altered regs. */
1704 simplify_using_condition (rtx cond
, rtx
*expr
, regset altered
)
1706 rtx rev
, reve
, exp
= *expr
;
1708 /* If some register gets altered later, we do not really speak about its
1709 value at the time of comparison. */
1711 && for_each_rtx (&cond
, altered_reg_used
, altered
))
1714 if (GET_CODE (cond
) == EQ
1715 && REG_P (XEXP (cond
, 0)) && CONSTANT_P (XEXP (cond
, 1)))
1717 *expr
= simplify_replace_rtx (*expr
, XEXP (cond
, 0), XEXP (cond
, 1));
1721 if (!COMPARISON_P (exp
))
1724 rev
= reversed_condition (cond
);
1725 reve
= reversed_condition (exp
);
1727 cond
= canon_condition (cond
);
1728 exp
= canon_condition (exp
);
1730 rev
= canon_condition (rev
);
1732 reve
= canon_condition (reve
);
1734 if (rtx_equal_p (exp
, cond
))
1736 *expr
= const_true_rtx
;
1740 if (rev
&& rtx_equal_p (exp
, rev
))
1746 if (implies_p (cond
, exp
))
1748 *expr
= const_true_rtx
;
1752 if (reve
&& implies_p (cond
, reve
))
1758 /* A proof by contradiction. If *EXPR implies (not cond), *EXPR must
1760 if (rev
&& implies_p (exp
, rev
))
1766 /* Similarly, If (not *EXPR) implies (not cond), *EXPR must be true. */
1767 if (rev
&& reve
&& implies_p (reve
, rev
))
1769 *expr
= const_true_rtx
;
1773 /* We would like to have some other tests here. TODO. */
1778 /* Use relationship between A and *B to eventually eliminate *B.
1779 OP is the operation we consider. */
1782 eliminate_implied_condition (enum rtx_code op
, rtx a
, rtx
*b
)
1787 /* If A implies *B, we may replace *B by true. */
1788 if (implies_p (a
, *b
))
1789 *b
= const_true_rtx
;
1793 /* If *B implies A, we may replace *B by false. */
1794 if (implies_p (*b
, a
))
1803 /* Eliminates the conditions in TAIL that are implied by HEAD. OP is the
1804 operation we consider. */
1807 eliminate_implied_conditions (enum rtx_code op
, rtx
*head
, rtx tail
)
1811 for (elt
= tail
; elt
; elt
= XEXP (elt
, 1))
1812 eliminate_implied_condition (op
, *head
, &XEXP (elt
, 0));
1813 for (elt
= tail
; elt
; elt
= XEXP (elt
, 1))
1814 eliminate_implied_condition (op
, XEXP (elt
, 0), head
);
1817 /* Simplifies *EXPR using initial values at the start of the LOOP. If *EXPR
1818 is a list, its elements are assumed to be combined using OP. */
1821 simplify_using_initial_values (struct loop
*loop
, enum rtx_code op
, rtx
*expr
)
1823 bool expression_valid
;
1824 rtx head
, tail
, insn
, cond_list
, last_valid_expr
;
1826 regset altered
, this_altered
;
1832 if (CONSTANT_P (*expr
))
1835 if (GET_CODE (*expr
) == EXPR_LIST
)
1837 head
= XEXP (*expr
, 0);
1838 tail
= XEXP (*expr
, 1);
1840 eliminate_implied_conditions (op
, &head
, tail
);
1845 neutral
= const_true_rtx
;
1850 neutral
= const0_rtx
;
1851 aggr
= const_true_rtx
;
1858 simplify_using_initial_values (loop
, UNKNOWN
, &head
);
1861 XEXP (*expr
, 0) = aggr
;
1862 XEXP (*expr
, 1) = NULL_RTX
;
1865 else if (head
== neutral
)
1868 simplify_using_initial_values (loop
, op
, expr
);
1871 simplify_using_initial_values (loop
, op
, &tail
);
1873 if (tail
&& XEXP (tail
, 0) == aggr
)
1879 XEXP (*expr
, 0) = head
;
1880 XEXP (*expr
, 1) = tail
;
1884 gcc_assert (op
== UNKNOWN
);
1887 if (for_each_rtx (expr
, replace_single_def_regs
, expr
) == 0)
1889 if (CONSTANT_P (*expr
))
1892 e
= loop_preheader_edge (loop
);
1893 if (e
->src
== ENTRY_BLOCK_PTR
)
1896 altered
= ALLOC_REG_SET (®_obstack
);
1897 this_altered
= ALLOC_REG_SET (®_obstack
);
1899 expression_valid
= true;
1900 last_valid_expr
= *expr
;
1901 cond_list
= NULL_RTX
;
1904 insn
= BB_END (e
->src
);
1905 if (any_condjump_p (insn
))
1907 rtx cond
= get_condition (BB_END (e
->src
), NULL
, false, true);
1909 if (cond
&& (e
->flags
& EDGE_FALLTHRU
))
1910 cond
= reversed_condition (cond
);
1914 simplify_using_condition (cond
, expr
, altered
);
1918 if (CONSTANT_P (*expr
))
1920 for (note
= cond_list
; note
; note
= XEXP (note
, 1))
1922 simplify_using_condition (XEXP (note
, 0), expr
, altered
);
1923 if (CONSTANT_P (*expr
))
1927 cond_list
= alloc_EXPR_LIST (0, cond
, cond_list
);
1931 FOR_BB_INSNS_REVERSE (e
->src
, insn
)
1939 CLEAR_REG_SET (this_altered
);
1940 note_stores (PATTERN (insn
), mark_altered
, this_altered
);
1945 /* Kill all call clobbered registers. */
1946 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1947 if (TEST_HARD_REG_BIT (regs_invalidated_by_call
, i
))
1948 SET_REGNO_REG_SET (this_altered
, i
);
1951 if (suitable_set_for_replacement (insn
, &dest
, &src
))
1953 rtx
*pnote
, *pnote_next
;
1955 replace_in_expr (expr
, dest
, src
);
1956 if (CONSTANT_P (*expr
))
1959 for (pnote
= &cond_list
; *pnote
; pnote
= pnote_next
)
1962 rtx old_cond
= XEXP (note
, 0);
1964 pnote_next
= &XEXP (note
, 1);
1965 replace_in_expr (&XEXP (note
, 0), dest
, src
);
1967 /* We can no longer use a condition that has been simplified
1968 to a constant, and simplify_using_condition will abort if
1970 if (CONSTANT_P (XEXP (note
, 0)))
1972 *pnote
= *pnote_next
;
1974 free_EXPR_LIST_node (note
);
1976 /* Retry simplifications with this condition if either the
1977 expression or the condition changed. */
1978 else if (old_cond
!= XEXP (note
, 0) || old
!= *expr
)
1979 simplify_using_condition (XEXP (note
, 0), expr
, altered
);
1983 /* If we did not use this insn to make a replacement, any overlap
1984 between stores in this insn and our expression will cause the
1985 expression to become invalid. */
1986 if (for_each_rtx (expr
, altered_reg_used
, this_altered
))
1989 if (CONSTANT_P (*expr
))
1992 IOR_REG_SET (altered
, this_altered
);
1994 /* If the expression now contains regs that have been altered, we
1995 can't return it to the caller. However, it is still valid for
1996 further simplification, so keep searching to see if we can
1997 eventually turn it into a constant. */
1998 if (for_each_rtx (expr
, altered_reg_used
, altered
))
1999 expression_valid
= false;
2000 if (expression_valid
)
2001 last_valid_expr
= *expr
;
2004 if (!single_pred_p (e
->src
)
2005 || single_pred (e
->src
) == ENTRY_BLOCK_PTR
)
2007 e
= single_pred_edge (e
->src
);
2011 free_EXPR_LIST_list (&cond_list
);
2012 if (!CONSTANT_P (*expr
))
2013 *expr
= last_valid_expr
;
2014 FREE_REG_SET (altered
);
2015 FREE_REG_SET (this_altered
);
/* Transforms invariant IV into MODE.  Adds assumptions based on the fact
   that IV occurs as the left operand of comparison COND, whose signedness
   is SIGNED_P, to DESC.  */
2023 shorten_into_mode (struct rtx_iv
*iv
, enum machine_mode mode
,
2024 enum rtx_code cond
, bool signed_p
, struct niter_desc
*desc
)
2026 rtx mmin
, mmax
, cond_over
, cond_under
;
2028 get_mode_bounds (mode
, signed_p
, iv
->extend_mode
, &mmin
, &mmax
);
2029 cond_under
= simplify_gen_relational (LT
, SImode
, iv
->extend_mode
,
2031 cond_over
= simplify_gen_relational (GT
, SImode
, iv
->extend_mode
,
2040 if (cond_under
!= const0_rtx
)
2042 alloc_EXPR_LIST (0, cond_under
, desc
->infinite
);
2043 if (cond_over
!= const0_rtx
)
2044 desc
->noloop_assumptions
=
2045 alloc_EXPR_LIST (0, cond_over
, desc
->noloop_assumptions
);
2052 if (cond_over
!= const0_rtx
)
2054 alloc_EXPR_LIST (0, cond_over
, desc
->infinite
);
2055 if (cond_under
!= const0_rtx
)
2056 desc
->noloop_assumptions
=
2057 alloc_EXPR_LIST (0, cond_under
, desc
->noloop_assumptions
);
2061 if (cond_over
!= const0_rtx
)
2063 alloc_EXPR_LIST (0, cond_over
, desc
->infinite
);
2064 if (cond_under
!= const0_rtx
)
2066 alloc_EXPR_LIST (0, cond_under
, desc
->infinite
);
2074 iv
->extend
= signed_p
? SIGN_EXTEND
: ZERO_EXTEND
;
2077 /* Transforms IV0 and IV1 compared by COND so that they are both compared as
2078 subregs of the same mode if possible (sometimes it is necessary to add
2079 some assumptions to DESC). */
2082 canonicalize_iv_subregs (struct rtx_iv
*iv0
, struct rtx_iv
*iv1
,
2083 enum rtx_code cond
, struct niter_desc
*desc
)
2085 enum machine_mode comp_mode
;
2088 /* If the ivs behave specially in the first iteration, or are
2089 added/multiplied after extending, we ignore them. */
2090 if (iv0
->first_special
|| iv0
->mult
!= const1_rtx
|| iv0
->delta
!= const0_rtx
)
2092 if (iv1
->first_special
|| iv1
->mult
!= const1_rtx
|| iv1
->delta
!= const0_rtx
)
2095 /* If there is some extend, it must match signedness of the comparison. */
2100 if (iv0
->extend
== ZERO_EXTEND
2101 || iv1
->extend
== ZERO_EXTEND
)
2108 if (iv0
->extend
== SIGN_EXTEND
2109 || iv1
->extend
== SIGN_EXTEND
)
2115 if (iv0
->extend
!= UNKNOWN
2116 && iv1
->extend
!= UNKNOWN
2117 && iv0
->extend
!= iv1
->extend
)
2121 if (iv0
->extend
!= UNKNOWN
)
2122 signed_p
= iv0
->extend
== SIGN_EXTEND
;
2123 if (iv1
->extend
!= UNKNOWN
)
2124 signed_p
= iv1
->extend
== SIGN_EXTEND
;
  /* Values of both variables should be computed in the same mode.  These
     might indeed be different, if we have a comparison like

     (compare (subreg:SI (iv0)) (subreg:SI (iv1)))

     and iv0 and iv1 are both ivs iterating in SI mode, but calculated
     in different modes.  This does not seem impossible to handle, but
     it hardly ever occurs in practice.

     The only exception is the case when one of the operands is invariant.
     For example pentium 3 generates comparisons like
     (lt (subreg:HI (reg:SI)) 100).  Here we assign HImode to 100, but we
     definitely do not want this to prevent the optimization.  */
2144 comp_mode
= iv0
->extend_mode
;
2145 if (GET_MODE_BITSIZE (comp_mode
) < GET_MODE_BITSIZE (iv1
->extend_mode
))
2146 comp_mode
= iv1
->extend_mode
;
2148 if (iv0
->extend_mode
!= comp_mode
)
2150 if (iv0
->mode
!= iv0
->extend_mode
2151 || iv0
->step
!= const0_rtx
)
2154 iv0
->base
= simplify_gen_unary (signed_p
? SIGN_EXTEND
: ZERO_EXTEND
,
2155 comp_mode
, iv0
->base
, iv0
->mode
);
2156 iv0
->extend_mode
= comp_mode
;
2159 if (iv1
->extend_mode
!= comp_mode
)
2161 if (iv1
->mode
!= iv1
->extend_mode
2162 || iv1
->step
!= const0_rtx
)
2165 iv1
->base
= simplify_gen_unary (signed_p
? SIGN_EXTEND
: ZERO_EXTEND
,
2166 comp_mode
, iv1
->base
, iv1
->mode
);
2167 iv1
->extend_mode
= comp_mode
;
2170 /* Check that both ivs belong to a range of a single mode. If one of the
2171 operands is an invariant, we may need to shorten it into the common
2173 if (iv0
->mode
== iv0
->extend_mode
2174 && iv0
->step
== const0_rtx
2175 && iv0
->mode
!= iv1
->mode
)
2176 shorten_into_mode (iv0
, iv1
->mode
, cond
, signed_p
, desc
);
2178 if (iv1
->mode
== iv1
->extend_mode
2179 && iv1
->step
== const0_rtx
2180 && iv0
->mode
!= iv1
->mode
)
2181 shorten_into_mode (iv1
, iv0
->mode
, swap_condition (cond
), signed_p
, desc
);
2183 if (iv0
->mode
!= iv1
->mode
)
2186 desc
->mode
= iv0
->mode
;
2187 desc
->signed_p
= signed_p
;
2192 /* Tries to estimate the maximum number of iterations in LOOP, and store the
2193 result in DESC. This function is called from iv_number_of_iterations with
2194 a number of fields in DESC already filled in. OLD_NITER is the original
2195 expression for the number of iterations, before we tried to simplify it. */
2197 static unsigned HOST_WIDEST_INT
2198 determine_max_iter (struct loop
*loop
, struct niter_desc
*desc
, rtx old_niter
)
2200 rtx niter
= desc
->niter_expr
;
2201 rtx mmin
, mmax
, cmp
;
2202 unsigned HOST_WIDEST_INT nmax
, inc
;
2204 if (GET_CODE (niter
) == AND
2205 && CONST_INT_P (XEXP (niter
, 0)))
2207 nmax
= INTVAL (XEXP (niter
, 0));
2208 if (!(nmax
& (nmax
+ 1)))
2210 desc
->niter_max
= nmax
;
2215 get_mode_bounds (desc
->mode
, desc
->signed_p
, desc
->mode
, &mmin
, &mmax
);
2216 nmax
= INTVAL (mmax
) - INTVAL (mmin
);
2218 if (GET_CODE (niter
) == UDIV
)
2220 if (!CONST_INT_P (XEXP (niter
, 1)))
2222 desc
->niter_max
= nmax
;
2225 inc
= INTVAL (XEXP (niter
, 1));
2226 niter
= XEXP (niter
, 0);
2231 /* We could use a binary search here, but for now improving the upper
2232 bound by just one eliminates one important corner case. */
2233 cmp
= simplify_gen_relational (desc
->signed_p
? LT
: LTU
, VOIDmode
,
2234 desc
->mode
, old_niter
, mmax
);
2235 simplify_using_initial_values (loop
, UNKNOWN
, &cmp
);
2236 if (cmp
== const_true_rtx
)
2241 fprintf (dump_file
, ";; improved upper bound by one.\n");
2243 desc
->niter_max
= nmax
/ inc
;
2247 /* Computes number of iterations of the CONDITION in INSN in LOOP and stores
2248 the result into DESC. Very similar to determine_number_of_iterations
2249 (basically its rtl version), complicated by things like subregs. */
2252 iv_number_of_iterations (struct loop
*loop
, rtx insn
, rtx condition
,
2253 struct niter_desc
*desc
)
2255 rtx op0
, op1
, delta
, step
, bound
, may_xform
, tmp
, tmp0
, tmp1
;
2256 struct rtx_iv iv0
, iv1
, tmp_iv
;
2257 rtx assumption
, may_not_xform
;
2259 enum machine_mode mode
, comp_mode
;
2260 rtx mmin
, mmax
, mode_mmin
, mode_mmax
;
2261 unsigned HOST_WIDEST_INT s
, size
, d
, inv
;
2262 HOST_WIDEST_INT up
, down
, inc
, step_val
;
2263 int was_sharp
= false;
  /* The meaning of these assumptions is this:
     if !assumptions
       then the rest of information does not have to be valid
     if noloop_assumptions then the loop does not roll
     if infinite then this exit is never used */
2273 desc
->assumptions
= NULL_RTX
;
2274 desc
->noloop_assumptions
= NULL_RTX
;
2275 desc
->infinite
= NULL_RTX
;
2276 desc
->simple_p
= true;
2278 desc
->const_iter
= false;
2279 desc
->niter_expr
= NULL_RTX
;
2280 desc
->niter_max
= 0;
2282 cond
= GET_CODE (condition
);
2283 gcc_assert (COMPARISON_P (condition
));
2285 mode
= GET_MODE (XEXP (condition
, 0));
2286 if (mode
== VOIDmode
)
2287 mode
= GET_MODE (XEXP (condition
, 1));
2288 /* The constant comparisons should be folded. */
2289 gcc_assert (mode
!= VOIDmode
);
2291 /* We only handle integers or pointers. */
2292 if (GET_MODE_CLASS (mode
) != MODE_INT
2293 && GET_MODE_CLASS (mode
) != MODE_PARTIAL_INT
)
2296 op0
= XEXP (condition
, 0);
2297 if (!iv_analyze (insn
, op0
, &iv0
))
2299 if (iv0
.extend_mode
== VOIDmode
)
2300 iv0
.mode
= iv0
.extend_mode
= mode
;
2302 op1
= XEXP (condition
, 1);
2303 if (!iv_analyze (insn
, op1
, &iv1
))
2305 if (iv1
.extend_mode
== VOIDmode
)
2306 iv1
.mode
= iv1
.extend_mode
= mode
;
2308 if (GET_MODE_BITSIZE (iv0
.extend_mode
) > HOST_BITS_PER_WIDE_INT
2309 || GET_MODE_BITSIZE (iv1
.extend_mode
) > HOST_BITS_PER_WIDE_INT
)
2312 /* Check condition and normalize it. */
2320 tmp_iv
= iv0
; iv0
= iv1
; iv1
= tmp_iv
;
2321 cond
= swap_condition (cond
);
2333 /* Handle extends. This is relatively nontrivial, so we only try in some
2334 easy cases, when we can canonicalize the ivs (possibly by adding some
2335 assumptions) to shape subreg (base + i * step). This function also fills
2336 in desc->mode and desc->signed_p. */
2338 if (!canonicalize_iv_subregs (&iv0
, &iv1
, cond
, desc
))
2341 comp_mode
= iv0
.extend_mode
;
2343 size
= GET_MODE_BITSIZE (mode
);
2344 get_mode_bounds (mode
, (cond
== LE
|| cond
== LT
), comp_mode
, &mmin
, &mmax
);
2345 mode_mmin
= lowpart_subreg (mode
, mmin
, comp_mode
);
2346 mode_mmax
= lowpart_subreg (mode
, mmax
, comp_mode
);
2348 if (!CONST_INT_P (iv0
.step
) || !CONST_INT_P (iv1
.step
))
2351 /* We can take care of the case of two induction variables chasing each other
2352 if the test is NE. I have never seen a loop using it, but still it is
2354 if (iv0
.step
!= const0_rtx
&& iv1
.step
!= const0_rtx
)
2359 iv0
.step
= simplify_gen_binary (MINUS
, comp_mode
, iv0
.step
, iv1
.step
);
2360 iv1
.step
= const0_rtx
;
2363 /* This is either infinite loop or the one that ends immediately, depending
2364 on initial values. Unswitching should remove this kind of conditions. */
2365 if (iv0
.step
== const0_rtx
&& iv1
.step
== const0_rtx
)
2370 if (iv0
.step
== const0_rtx
)
2371 step_val
= -INTVAL (iv1
.step
);
2373 step_val
= INTVAL (iv0
.step
);
2375 /* Ignore loops of while (i-- < 10) type. */
2379 step_is_pow2
= !(step_val
& (step_val
- 1));
2383 /* We do not care about whether the step is power of two in this
2385 step_is_pow2
= false;
2389 /* Some more condition normalization. We must record some assumptions
2390 due to overflows. */
      /* We want to take care only of non-sharp relationals; this is easy,
         as in the cases where the overflow would make the transformation
         unsafe the loop does not roll.  Seemingly it would make more sense
         to want to take care of sharp relationals instead, as NE is more
         similar to them, but the problem is that here the transformation
         would be more difficult due to possibly infinite loops.  */
2401 if (iv0
.step
== const0_rtx
)
2403 tmp
= lowpart_subreg (mode
, iv0
.base
, comp_mode
);
2404 assumption
= simplify_gen_relational (EQ
, SImode
, mode
, tmp
,
2406 if (assumption
== const_true_rtx
)
2407 goto zero_iter_simplify
;
2408 iv0
.base
= simplify_gen_binary (PLUS
, comp_mode
,
2409 iv0
.base
, const1_rtx
);
2413 tmp
= lowpart_subreg (mode
, iv1
.base
, comp_mode
);
2414 assumption
= simplify_gen_relational (EQ
, SImode
, mode
, tmp
,
2416 if (assumption
== const_true_rtx
)
2417 goto zero_iter_simplify
;
2418 iv1
.base
= simplify_gen_binary (PLUS
, comp_mode
,
2419 iv1
.base
, constm1_rtx
);
2422 if (assumption
!= const0_rtx
)
2423 desc
->noloop_assumptions
=
2424 alloc_EXPR_LIST (0, assumption
, desc
->noloop_assumptions
);
2425 cond
= (cond
== LT
) ? LE
: LEU
;
2427 /* It will be useful to be able to tell the difference once more in
2428 LE -> NE reduction. */
2434 /* Take care of trivially infinite loops. */
2437 if (iv0
.step
== const0_rtx
)
2439 tmp
= lowpart_subreg (mode
, iv0
.base
, comp_mode
);
2440 if (rtx_equal_p (tmp
, mode_mmin
))
2443 alloc_EXPR_LIST (0, const_true_rtx
, NULL_RTX
);
2444 /* Fill in the remaining fields somehow. */
2445 goto zero_iter_simplify
;
2450 tmp
= lowpart_subreg (mode
, iv1
.base
, comp_mode
);
2451 if (rtx_equal_p (tmp
, mode_mmax
))
2454 alloc_EXPR_LIST (0, const_true_rtx
, NULL_RTX
);
2455 /* Fill in the remaining fields somehow. */
2456 goto zero_iter_simplify
;
  /* If we can, we want to take care of NE conditions instead of size
     comparisons, as they are much more friendly (most importantly
     this takes care of special handling of loops with step 1).  We can
     do it if we first check that the upper bound is greater than or equal
     to the lower bound, their difference is constant c modulo step, and
     that there is no overflow.  */
2469 if (iv0
.step
== const0_rtx
)
2470 step
= simplify_gen_unary (NEG
, comp_mode
, iv1
.step
, comp_mode
);
2473 delta
= simplify_gen_binary (MINUS
, comp_mode
, iv1
.base
, iv0
.base
);
2474 delta
= lowpart_subreg (mode
, delta
, comp_mode
);
2475 delta
= simplify_gen_binary (UMOD
, mode
, delta
, step
);
2476 may_xform
= const0_rtx
;
2477 may_not_xform
= const_true_rtx
;
2479 if (CONST_INT_P (delta
))
2481 if (was_sharp
&& INTVAL (delta
) == INTVAL (step
) - 1)
          /* A special case.  We have transformed condition of type
               for (i = 0; i < 4; i += 4)
             into
               for (i = 0; i <= 3; i += 4)
             obviously if the test for overflow during that transformation
             passed, we cannot overflow here.  Most importantly any
             loop with sharp end condition and step 1 falls into this
             category, so handling this case specially is definitely
             worth the trouble.  */
2492 may_xform
= const_true_rtx
;
2494 else if (iv0
.step
== const0_rtx
)
2496 bound
= simplify_gen_binary (PLUS
, comp_mode
, mmin
, step
);
2497 bound
= simplify_gen_binary (MINUS
, comp_mode
, bound
, delta
);
2498 bound
= lowpart_subreg (mode
, bound
, comp_mode
);
2499 tmp
= lowpart_subreg (mode
, iv0
.base
, comp_mode
);
2500 may_xform
= simplify_gen_relational (cond
, SImode
, mode
,
2502 may_not_xform
= simplify_gen_relational (reverse_condition (cond
),
2508 bound
= simplify_gen_binary (MINUS
, comp_mode
, mmax
, step
);
2509 bound
= simplify_gen_binary (PLUS
, comp_mode
, bound
, delta
);
2510 bound
= lowpart_subreg (mode
, bound
, comp_mode
);
2511 tmp
= lowpart_subreg (mode
, iv1
.base
, comp_mode
);
2512 may_xform
= simplify_gen_relational (cond
, SImode
, mode
,
2514 may_not_xform
= simplify_gen_relational (reverse_condition (cond
),
2520 if (may_xform
!= const0_rtx
)
2522 /* We perform the transformation always provided that it is not
2523 completely senseless. This is OK, as we would need this assumption
2524 to determine the number of iterations anyway. */
2525 if (may_xform
!= const_true_rtx
)
2527 /* If the step is a power of two and the final value we have
2528 computed overflows, the cycle is infinite. Otherwise it
2529 is nontrivial to compute the number of iterations. */
2531 desc
->infinite
= alloc_EXPR_LIST (0, may_not_xform
,
2534 desc
->assumptions
= alloc_EXPR_LIST (0, may_xform
,
2538 /* We are going to lose some information about upper bound on
2539 number of iterations in this step, so record the information
2541 inc
= INTVAL (iv0
.step
) - INTVAL (iv1
.step
);
2542 if (CONST_INT_P (iv1
.base
))
2543 up
= INTVAL (iv1
.base
);
2545 up
= INTVAL (mode_mmax
) - inc
;
2546 down
= INTVAL (CONST_INT_P (iv0
.base
)
2549 desc
->niter_max
= (up
- down
) / inc
+ 1;
2551 if (iv0
.step
== const0_rtx
)
2553 iv0
.base
= simplify_gen_binary (PLUS
, comp_mode
, iv0
.base
, delta
);
2554 iv0
.base
= simplify_gen_binary (MINUS
, comp_mode
, iv0
.base
, step
);
2558 iv1
.base
= simplify_gen_binary (MINUS
, comp_mode
, iv1
.base
, delta
);
2559 iv1
.base
= simplify_gen_binary (PLUS
, comp_mode
, iv1
.base
, step
);
2562 tmp0
= lowpart_subreg (mode
, iv0
.base
, comp_mode
);
2563 tmp1
= lowpart_subreg (mode
, iv1
.base
, comp_mode
);
2564 assumption
= simplify_gen_relational (reverse_condition (cond
),
2565 SImode
, mode
, tmp0
, tmp1
);
2566 if (assumption
== const_true_rtx
)
2567 goto zero_iter_simplify
;
2568 else if (assumption
!= const0_rtx
)
2569 desc
->noloop_assumptions
=
2570 alloc_EXPR_LIST (0, assumption
, desc
->noloop_assumptions
);
2575 /* Count the number of iterations. */
2578 /* Everything we do here is just arithmetics modulo size of mode. This
2579 makes us able to do more involved computations of number of iterations
2580 than in other cases. First transform the condition into shape
2581 s * i <> c, with s positive. */
2582 iv1
.base
= simplify_gen_binary (MINUS
, comp_mode
, iv1
.base
, iv0
.base
);
2583 iv0
.base
= const0_rtx
;
2584 iv0
.step
= simplify_gen_binary (MINUS
, comp_mode
, iv0
.step
, iv1
.step
);
2585 iv1
.step
= const0_rtx
;
2586 if (INTVAL (iv0
.step
) < 0)
2588 iv0
.step
= simplify_gen_unary (NEG
, comp_mode
, iv0
.step
, mode
);
2589 iv1
.base
= simplify_gen_unary (NEG
, comp_mode
, iv1
.base
, mode
);
2591 iv0
.step
= lowpart_subreg (mode
, iv0
.step
, comp_mode
);
  /* Let gcd (s, size of mode) = d.  If d does not divide c, the loop
     is infinite.  Otherwise, the number of iterations is
     (inverse(s/d) * (c/d)) mod (size of mode/d).  */
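  /* A worked example of the formula above (illustrative only): in SImode,
     for the transformed condition 4 * i != 20 we have s = 4, c = 20,
     d = gcd (4, 2^32) = 4, s/d = 1, c/d = 5 and inverse (1) = 1, so the
     number of iterations is (1 * 5) mod 2^30 = 5.  */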
2596 s
= INTVAL (iv0
.step
); d
= 1;
2603 bound
= GEN_INT (((unsigned HOST_WIDEST_INT
) 1 << (size
- 1 ) << 1) - 1);
2605 tmp1
= lowpart_subreg (mode
, iv1
.base
, comp_mode
);
2606 tmp
= simplify_gen_binary (UMOD
, mode
, tmp1
, GEN_INT (d
));
2607 assumption
= simplify_gen_relational (NE
, SImode
, mode
, tmp
, const0_rtx
);
2608 desc
->infinite
= alloc_EXPR_LIST (0, assumption
, desc
->infinite
);
2610 tmp
= simplify_gen_binary (UDIV
, mode
, tmp1
, GEN_INT (d
));
2611 inv
= inverse (s
, size
);
2612 tmp
= simplify_gen_binary (MULT
, mode
, tmp
, gen_int_mode (inv
, mode
));
2613 desc
->niter_expr
= simplify_gen_binary (AND
, mode
, tmp
, bound
);
2617 if (iv1
.step
== const0_rtx
)
2618 /* Condition in shape a + s * i <= b
2619 We must know that b + s does not overflow and a <= b + s and then we
2620 can compute number of iterations as (b + s - a) / s. (It might
2621 seem that we in fact could be more clever about testing the b + s
2622 overflow condition using some information about b - a mod s,
2623 but it was already taken into account during LE -> NE transform). */
2626       tmp0 = lowpart_subreg (mode, iv0.base, comp_mode);
2627       tmp1 = lowpart_subreg (mode, iv1.base, comp_mode);
2629       bound = simplify_gen_binary (MINUS, mode, mode_mmax,
2630                                    lowpart_subreg (mode, step,
2636       /* If s is power of 2, we know that the loop is infinite if
2637          a % s <= b % s and b + s overflows.  */
2638       assumption = simplify_gen_relational (reverse_condition (cond),
2642       t0 = simplify_gen_binary (UMOD, mode, copy_rtx (tmp0), step);
2643       t1 = simplify_gen_binary (UMOD, mode, copy_rtx (tmp1), step);
2644       tmp = simplify_gen_relational (cond, SImode, mode, t0, t1);
2645       assumption = simplify_gen_binary (AND, SImode, assumption, tmp);
2647         alloc_EXPR_LIST (0, assumption, desc->infinite);
2651       assumption = simplify_gen_relational (cond, SImode, mode,
2654         alloc_EXPR_LIST (0, assumption, desc->assumptions);
2657       tmp = simplify_gen_binary (PLUS, comp_mode, iv1.base, iv0.step);
2658       tmp = lowpart_subreg (mode, tmp, comp_mode);
2659       assumption = simplify_gen_relational (reverse_condition (cond),
2660                                             SImode, mode, tmp0, tmp);
2662       delta = simplify_gen_binary (PLUS, mode, tmp1, step);
2663       delta = simplify_gen_binary (MINUS, mode, delta, tmp0);
2667       /* The condition is in the shape a <= b - s * i.
2668          We must know that a - s does not overflow and a - s <= b, and then
2669          we can again compute the number of iterations as (b - (a - s)) / s.  */
2670       step = simplify_gen_unary (NEG, mode, iv1.step, mode);
2671       tmp0 = lowpart_subreg (mode, iv0.base, comp_mode);
2672       tmp1 = lowpart_subreg (mode, iv1.base, comp_mode);
2674       bound = simplify_gen_binary (PLUS, mode, mode_mmin,
2675                                    lowpart_subreg (mode, step, comp_mode));
2680       /* If s is power of 2, we know that the loop is infinite if
2681          a % s <= b % s and a - s overflows.  */
2682       assumption = simplify_gen_relational (reverse_condition (cond),
2686       t0 = simplify_gen_binary (UMOD, mode, copy_rtx (tmp0), step);
2687       t1 = simplify_gen_binary (UMOD, mode, copy_rtx (tmp1), step);
2688       tmp = simplify_gen_relational (cond, SImode, mode, t0, t1);
2689       assumption = simplify_gen_binary (AND, SImode, assumption, tmp);
2691         alloc_EXPR_LIST (0, assumption, desc->infinite);
2695       assumption = simplify_gen_relational (cond, SImode, mode,
2698         alloc_EXPR_LIST (0, assumption, desc->assumptions);
2701       tmp = simplify_gen_binary (PLUS, comp_mode, iv0.base, iv1.step);
2702       tmp = lowpart_subreg (mode, tmp, comp_mode);
2703       assumption = simplify_gen_relational (reverse_condition (cond),
2706       delta = simplify_gen_binary (MINUS, mode, tmp0, step);
2707       delta = simplify_gen_binary (MINUS, mode, tmp1, delta);
2709   if (assumption == const_true_rtx)
2710     goto zero_iter_simplify;
2711   else if (assumption != const0_rtx)
2712     desc->noloop_assumptions =
2713       alloc_EXPR_LIST (0, assumption, desc->noloop_assumptions);
2714   delta = simplify_gen_binary (UDIV, mode, delta, step);
2715   desc->niter_expr = delta;
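/* Illustrative sketch, added for exposition and not part of the original
   source: in both branches above the iteration count comes out as
   delta / step, where delta is (b + s) - a for "a + s * i <= b" and
   b - (a - s) for "a <= b - s * i" -- the same quantity.  The toy helper
   below uses invented names and is only valid under the assumptions the
   code records (b + s resp. a - s does not overflow, and the loop rolls at
   least once).  */

#include <stdint.h>

static uint64_t
toy_le_niter (uint64_t a, uint64_t s, uint64_t b)
{
  /* Iterations of "for (x = a; x <= b; x += s)", assuming a <= b and
     b + s does not wrap.  */
  return (b + s - a) / s;
}

/* For (i = 3; i <= 20; i += 5), toy_le_niter (3, 5, 20) == 4, matching
   the values i = 3, 8, 13, 18.  */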
2718   old_niter = desc->niter_expr;
2720   simplify_using_initial_values (loop, AND, &desc->assumptions);
2721   if (desc->assumptions
2722       && XEXP (desc->assumptions, 0) == const0_rtx)
2724   simplify_using_initial_values (loop, IOR, &desc->noloop_assumptions);
2725   simplify_using_initial_values (loop, IOR, &desc->infinite);
2726   simplify_using_initial_values (loop, UNKNOWN, &desc->niter_expr);
2728   /* Rerun the simplification.  Consider code (created by copying loop headers)
2740      The first pass determines that i = 0, and the second pass uses that fact
2741      to eliminate the noloop assumption.  */
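/* The code example the comment above refers to is elided in this listing;
   roughly, it has the shape of a while-loop whose header was copied into a
   guard (n here is just an illustrative bound):

     i = 0;
     if (0 < n)
       {
         do
           i++;
         while (i < n);
       }
*/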
2743   simplify_using_initial_values (loop, AND, &desc->assumptions);
2744   if (desc->assumptions
2745       && XEXP (desc->assumptions, 0) == const0_rtx)
2747   simplify_using_initial_values (loop, IOR, &desc->noloop_assumptions);
2748   simplify_using_initial_values (loop, IOR, &desc->infinite);
2749   simplify_using_initial_values (loop, UNKNOWN, &desc->niter_expr);
2751   if (desc->noloop_assumptions
2752       && XEXP (desc->noloop_assumptions, 0) == const_true_rtx)
2755   if (CONST_INT_P (desc->niter_expr))
2757       unsigned HOST_WIDEST_INT val = INTVAL (desc->niter_expr);
2759       desc->const_iter = true;
2760       desc->niter_max = desc->niter = val & GET_MODE_MASK (desc->mode);
2764       if (!desc->niter_max)
2765         desc->niter_max = determine_max_iter (loop, desc, old_niter);
2767   /* simplify_using_initial_values does a copy propagation on the registers
2768      in the expression for the number of iterations.  This lengthens the live
2769      ranges of registers and increases register pressure, and usually
2770      brings no gain (and if it happens to, the cse pass will take care
2771      of it anyway).  So prevent this behavior, unless it enabled us to
2772      derive that the number of iterations is a constant.  */
2773   desc->niter_expr = old_niter;
2779 /* Simplify the assumptions. */
2780   simplify_using_initial_values (loop, AND, &desc->assumptions);
2781   if (desc->assumptions
2782       && XEXP (desc->assumptions, 0) == const0_rtx)
2784   simplify_using_initial_values (loop, IOR, &desc->infinite);
2788   desc->const_iter = true;
2790   desc->niter_max = 0;
2791   desc->noloop_assumptions = NULL_RTX;
2792   desc->niter_expr = const0_rtx;
2796   desc->simple_p = false;
2800 /* Checks whether E is a simple exit from LOOP and stores its description
2804 check_simple_exit (struct loop *loop, edge e, struct niter_desc *desc)
2806   basic_block exit_bb;
2811   desc->simple_p = false;
2813 /* It must belong directly to the loop. */
2814   if (exit_bb->loop_father != loop)
2817 /* It must be tested (at least) once during any iteration. */
2818   if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit_bb))
2821 /* It must end in a simple conditional jump. */
2822   if (!any_condjump_p (BB_END (exit_bb)))
2825   ein = EDGE_SUCC (exit_bb, 0);
2827     ein = EDGE_SUCC (exit_bb, 1);
2830   desc->in_edge = ein;
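/* EIN thus ends up as the successor of the exit test other than the exit
   edge E itself, i.e. the edge along which execution stays inside the
   loop; it is recorded as in_edge, while E itself presumably becomes the
   descriptor's out_edge (compare the "simple exit" dump printed in
   find_simple_exit below).  */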
2832 /* Test whether the condition is suitable. */
2833   if (!(condition = get_condition (BB_END (ein->src), &at, false, false)))
2836   if (ein->flags & EDGE_FALLTHRU)
2838       condition = reversed_condition (condition);
2843   /* Check that we are able to determine the number of iterations and fill
2844      in the information about it.  */
2845   iv_number_of_iterations (loop, at, condition, desc);
2848 /* Finds a simple exit of LOOP and stores its description into DESC. */
2851 find_simple_exit (struct loop *loop, struct niter_desc *desc)
2856   struct niter_desc act;
2860   desc->simple_p = false;
2861   body = get_loop_body (loop);
2863   for (i = 0; i < loop->num_nodes; i++)
2865       FOR_EACH_EDGE (e, ei, body[i]->succs)
2867           if (flow_bb_inside_loop_p (loop, e->dest))
2870           check_simple_exit (loop, e, &act);
2878           /* Prefer constant iterations; the fewer, the better.  */
2880               || (desc->const_iter && act.niter >= desc->niter))
2883           /* Also, if the current exit may be infinite while the old one
2884              may not, prefer the old one.  */
2885           if (act.infinite && !desc->infinite)
2897       fprintf (dump_file, "Loop %d is simple:\n", loop->num);
2898       fprintf (dump_file, " simple exit %d -> %d\n",
2899                desc->out_edge->src->index,
2900                desc->out_edge->dest->index);
2901       if (desc->assumptions)
2903           fprintf (dump_file, " assumptions: ");
2904           print_rtl (dump_file, desc->assumptions);
2905           fprintf (dump_file, "\n");
2907       if (desc->noloop_assumptions)
2909           fprintf (dump_file, " does not roll if: ");
2910           print_rtl (dump_file, desc->noloop_assumptions);
2911           fprintf (dump_file, "\n");
2915           fprintf (dump_file, " infinite if: ");
2916           print_rtl (dump_file, desc->infinite);
2917           fprintf (dump_file, "\n");
2920       fprintf (dump_file, " number of iterations: ");
2921       print_rtl (dump_file, desc->niter_expr);
2922       fprintf (dump_file, "\n");
2924       fprintf (dump_file, " upper bound: ");
2925       fprintf (dump_file, HOST_WIDEST_INT_PRINT_DEC, desc->niter_max);
2926       fprintf (dump_file, "\n");
2929     fprintf (dump_file, "Loop %d is not simple.\n", loop->num);
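/* For reference, a dump produced by the fprintf calls above has roughly
   this shape (loop and edge numbers invented for illustration):

     Loop 2 is simple:
      simple exit 4 -> 6
      number of iterations: (const_int 99)
      upper bound: 99

   The "assumptions", "does not roll if" and "infinite if" lines appear
   only when the corresponding RTL lists are non-empty.  */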
2935 /* Creates a simple loop description of LOOP if it was not computed
2939 get_simple_loop_desc (struct loop *loop)
2941   struct niter_desc *desc = simple_loop_desc (loop);
2946   /* At least desc->infinite is not always initialized by
2947      find_simple_exit.  */
2948   desc = XCNEW (struct niter_desc);
2949   iv_analysis_loop_init (loop);
2950   find_simple_exit (loop, desc);
2953   if (desc->simple_p && (desc->assumptions || desc->infinite))
2955       const char *wording;
2957       /* Assume that no overflow happens and that the loop is finite.
2958          We already warned at the tree level if we ran optimizations there.  */
2959       if (!flag_tree_loop_optimize && warn_unsafe_loop_optimizations)
2964             flag_unsafe_loop_optimizations
2965             ? N_("assuming that the loop is not infinite")
2966             : N_("cannot optimize possibly infinite loops");
2967           warning (OPT_Wunsafe_loop_optimizations, "%s",
2970       if (desc->assumptions)
2973             flag_unsafe_loop_optimizations
2974             ? N_("assuming that the loop counter does not overflow")
2975             : N_("cannot optimize loop, the loop counter may overflow");
2976           warning (OPT_Wunsafe_loop_optimizations, "%s",
2981       if (flag_unsafe_loop_optimizations)
2983           desc->assumptions = NULL_RTX;
2984           desc->infinite = NULL_RTX;
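/* Illustrative usage sketch, added for exposition; the real callers live
   in other files, so this only shows the assumed typical pattern rather
   than quoting them:

     struct niter_desc *desc = get_simple_loop_desc (loop);

     if (desc->simple_p && desc->const_iter)
       ... transform the loop using desc->niter ...

     free_simple_loop_desc (loop);
*/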
2991 /* Releases simple loop description for LOOP.  */
2994 free_simple_loop_desc (struct loop *loop)
2996   struct niter_desc *desc = simple_loop_desc (loop);