* loop-iv.c (simplify_using_initial_values): Return if the
[official-gcc.git] / gcc / loop-iv.c
blob21005d3ff51f3644ad004f0149b5683d72a3fd33
1 /* Rtl-level induction variable analysis.
2 Copyright (C) 2004, 2005 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published by the
8 Free Software Foundation; either version 2, or (at your option) any
9 later version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT
12 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to the Free
18 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
19 02110-1301, USA. */
21 /* This is a simple analysis of induction variables of the loop. The major use
22 is for determining the number of iterations of a loop for loop unrolling,
23 doloop optimization and branch prediction. The iv information is computed
24 on demand.
26 Induction variable is analyzed by walking the use-def chains. When a biv
27 is found, it is cached in the bivs hash table. When register is proved
28 to be a giv, its description is stored to DF_REF_DATA of the def reference.
30 The analysis works always with one loop -- you must call
31 iv_analysis_loop_init (loop) for it. All the other functions then work with
32 this loop. When you need to work with another loop, just call
33 iv_analysis_loop_init for it. When you no longer need iv analysis, call
34 iv_analysis_done () to clean up the memory.
36 The available functions are:
38 iv_analyze (insn, reg, iv): Stores the description of the induction variable
39 corresponding to the use of register REG in INSN to IV. Returns true if
40 REG is an induction variable in INSN. false otherwise.
41 If use of REG is not found in INSN, following insns are scanned (so that
42 we may call this function on insn returned by get_condition).
43 iv_analyze_result (insn, def, iv): Stores to IV the description of the iv
44 corresponding to DEF, which is a register defined in INSN.
45 iv_analyze_expr (insn, rhs, mode, iv): Stores to IV the description of iv
 46    corresponding to expression EXPR evaluated at INSN.  All registers used by
47 EXPR must also be used in INSN.
48 iv_current_loop_df (): Returns the dataflow object for the current loop used
49 by iv analysis. */
51 #include "config.h"
52 #include "system.h"
53 #include "coretypes.h"
54 #include "tm.h"
55 #include "rtl.h"
56 #include "hard-reg-set.h"
57 #include "obstack.h"
58 #include "basic-block.h"
59 #include "cfgloop.h"
60 #include "expr.h"
61 #include "intl.h"
62 #include "output.h"
63 #include "toplev.h"
64 #include "df.h"
65 #include "hashtab.h"
/* Possible return values of iv_get_reaching_def.  */

enum iv_grd_result
{
  /* More than one reaching def, or reaching def that does not
     dominate the use.  */
  GRD_INVALID,

  /* The use is trivial invariant of the loop, i.e. is not changed
     inside the loop.  */
  GRD_INVARIANT,

  /* The use is reached by initial value and a value from the
     previous iteration.  */
  GRD_MAYBE_BIV,

  /* The use has single dominating def.  */
  GRD_SINGLE_DOM
};
/* Information about a biv (cached in the BIVS hash table, keyed and
   hashed by register number).  */

struct biv_entry
{
  unsigned regno;	/* The register of the biv.  */
  struct rtx_iv iv;	/* Value of the biv.  */
};

/* Induction variable stored at the reference (the DF_REF_DATA slot of a
   df definition is reused to cache the analyzed iv description).  */
#define DF_REF_IV(REF) ((struct rtx_iv *) DF_REF_DATA (REF))
#define DF_REF_IV_SET(REF, IV) DF_REF_DATA (REF) = (IV)
/* The current loop (set by iv_analysis_loop_init; all queries refer
   to this loop).  */

static struct loop *current_loop;

/* Dataflow information for the current loop (NULL until the first call
   to iv_analysis_loop_init; freed by iv_analysis_done).  */

static struct df *df = NULL;

/* Bivs of the current loop (hash table of struct biv_entry).  */

static htab_t bivs;
/* Return the dataflow object for the current loop.  */
struct df *
iv_current_loop_df (void)
{
  return df;
}

/* Forward declaration; iv_analyze_expr and iv_analyze_op are mutually
   recursive.  */
static bool iv_analyze_op (rtx, rtx, struct rtx_iv *);
/* Dumps information about IV to FILE.  */

extern void dump_iv_info (FILE *, struct rtx_iv *);
void
dump_iv_info (FILE *file, struct rtx_iv *iv)
{
  /* A NULL base marks an iv whose analysis failed.  */
  if (!iv->base)
    {
      fprintf (file, "not simple");
      return;
    }

  if (iv->step == const0_rtx
      && !iv->first_special)
    fprintf (file, "invariant ");

  print_rtl (file, iv->base);
  if (iv->step != const0_rtx)
    {
      fprintf (file, " + ");
      print_rtl (file, iv->step);
      fprintf (file, " * iteration");
    }
  fprintf (file, " (in %s)", GET_MODE_NAME (iv->mode));

  /* Print the extension (sign/zero extend) when the iv is computed in a
     narrower mode and extended to extend_mode.  */
  if (iv->mode != iv->extend_mode)
    fprintf (file, " %s to %s",
	     rtx_name[iv->extend],
	     GET_MODE_NAME (iv->extend_mode));

  if (iv->mult != const1_rtx)
    {
      fprintf (file, " * ");
      print_rtl (file, iv->mult);
    }
  if (iv->delta != const0_rtx)
    {
      fprintf (file, " + ");
      print_rtl (file, iv->delta);
    }
  if (iv->first_special)
    fprintf (file, " (first special)");
}
/* Generates a subreg to get the least significant part of EXPR (in mode
   INNER_MODE) to OUTER_MODE.  */

/* NOTE(review): the return-type line was lost in extraction; `static rtx'
   matches the simplify_gen_subreg result returned below -- confirm.  */
static rtx
lowpart_subreg (enum machine_mode outer_mode, rtx expr,
		enum machine_mode inner_mode)
{
  return simplify_gen_subreg (outer_mode, expr, inner_mode,
			      subreg_lowpart_offset (outer_mode, inner_mode));
}
175 /* Checks whether REG is a well-behaved register. */
177 static bool
178 simple_reg_p (rtx reg)
180 unsigned r;
182 if (GET_CODE (reg) == SUBREG)
184 if (!subreg_lowpart_p (reg))
185 return false;
186 reg = SUBREG_REG (reg);
189 if (!REG_P (reg))
190 return false;
192 r = REGNO (reg);
193 if (HARD_REGISTER_NUM_P (r))
194 return false;
196 if (GET_MODE_CLASS (GET_MODE (reg)) != MODE_INT)
197 return false;
199 return true;
202 /* Clears the information about ivs stored in df. */
204 static void
205 clear_iv_info (void)
207 unsigned i, n_defs = DF_DEFS_SIZE (df);
208 struct rtx_iv *iv;
209 struct df_ref *def;
211 for (i = 0; i < n_defs; i++)
213 def = DF_DEFS_GET (df, i);
214 iv = DF_REF_IV (def);
215 if (!iv)
216 continue;
217 free (iv);
218 DF_REF_IV_SET (def, NULL);
221 htab_empty (bivs);
/* Returns hash value for biv B (the register number serves directly
   as the hash).  */

static hashval_t
biv_hash (const void *b)
{
  return ((const struct biv_entry *) b)->regno;
}
/* Compares biv B and register R (htab equality callback; R is the rtx
   register used as the lookup key).  */

static int
biv_eq (const void *b, const void *r)
{
  return ((const struct biv_entry *) b)->regno == REGNO ((rtx) r);
}
/* Prepare the data for an induction variable analysis of a LOOP.  */

void
iv_analysis_loop_init (struct loop *loop)
{
  basic_block *body = get_loop_body_in_dom_order (loop), bb;
  bitmap blocks = BITMAP_ALLOC (NULL);
  unsigned i;
  bool first_time = (df == NULL);

  current_loop = loop;

  /* Clear the information from the analysis of the previous loop.  */
  if (first_time)
    {
      /* First use: create the dataflow object and the biv cache.  The
	 `free' argument makes the table own its entries.  */
      df = df_init (DF_HARD_REGS | DF_EQUIV_NOTES);
      df_chain_add_problem (df, DF_UD_CHAIN);
      bivs = htab_create (10, biv_hash, biv_eq, free);
    }
  else
    clear_iv_info ();

  /* Restrict the dataflow analysis to the blocks of LOOP.  */
  for (i = 0; i < loop->num_nodes; i++)
    {
      bb = body[i];
      bitmap_set_bit (blocks, bb->index);
    }

  df_set_blocks (df, blocks);
  df_analyze (df);
  BITMAP_FREE (blocks);
  free (body);
}
/* Finds the definition of REG that dominates loop latch and stores
   it to DEF.  Returns false if there is not a single definition
   dominating the latch.  If REG has no definition in loop, DEF
   is set to NULL and true is returned.  */

static bool
latch_dominating_def (rtx reg, struct df_ref **def)
{
  struct df_ref *single_rd = NULL, *adef;
  unsigned regno = REGNO (reg);
  struct df_reg_info *reg_info = DF_REG_DEF_GET (df, regno);
  struct df_rd_bb_info *bb_info = DF_RD_BB_INFO (df, current_loop->latch);

  /* Walk all definitions of REG; only those that reach the exit of the
     latch block are of interest.  */
  for (adef = reg_info->reg_chain; adef; adef = adef->next_reg)
    {
      if (!bitmap_bit_p (bb_info->out, DF_REF_ID (adef)))
	continue;

      /* More than one reaching definition.  */
      if (single_rd)
	return false;

      /* A definition inside an inner loop or conditional does not
	 dominate the latch.  */
      if (!just_once_each_iteration_p (current_loop, DF_REF_BB (adef)))
	return false;

      single_rd = adef;
    }

  *def = single_rd;
  return true;
}
/* Gets definition of REG reaching its use in INSN and stores it to DEF.
   Returns one of the iv_grd_result codes classifying the situation.  */

static enum iv_grd_result
iv_get_reaching_def (rtx insn, rtx reg, struct df_ref **def)
{
  struct df_ref *use, *adef;
  basic_block def_bb, use_bb;
  rtx def_insn;
  bool dom_p;

  *def = NULL;
  if (!simple_reg_p (reg))
    return GRD_INVALID;
  if (GET_CODE (reg) == SUBREG)
    reg = SUBREG_REG (reg);
  gcc_assert (REG_P (reg));

  use = df_find_use (df, insn, reg);
  gcc_assert (use != NULL);

  /* No definition inside the analyzed region -- loop invariant.  */
  if (!DF_REF_CHAIN (use))
    return GRD_INVARIANT;

  /* More than one reaching def.  */
  if (DF_REF_CHAIN (use)->next)
    return GRD_INVALID;

  adef = DF_REF_CHAIN (use)->ref;
  def_insn = DF_REF_INSN (adef);
  def_bb = DF_REF_BB (adef);
  use_bb = BLOCK_FOR_INSN (insn);

  /* Within one block compare logical insn order; across blocks use the
     dominator tree.  */
  if (use_bb == def_bb)
    dom_p = (DF_INSN_LUID (df, def_insn) < DF_INSN_LUID (df, insn));
  else
    dom_p = dominated_by_p (CDI_DOMINATORS, use_bb, def_bb);

  if (dom_p)
    {
      *def = adef;
      return GRD_SINGLE_DOM;
    }

  /* The definition does not dominate the use.  This is still OK if
     this may be a use of a biv, i.e. if the def_bb dominates loop
     latch.  */
  if (just_once_each_iteration_p (current_loop, def_bb))
    return GRD_MAYBE_BIV;

  return GRD_INVALID;
}
357 /* Sets IV to invariant CST in MODE. Always returns true (just for
358 consistency with other iv manipulation functions that may fail). */
360 static bool
361 iv_constant (struct rtx_iv *iv, rtx cst, enum machine_mode mode)
363 if (mode == VOIDmode)
364 mode = GET_MODE (cst);
366 iv->mode = mode;
367 iv->base = cst;
368 iv->step = const0_rtx;
369 iv->first_special = false;
370 iv->extend = UNKNOWN;
371 iv->extend_mode = iv->mode;
372 iv->delta = const0_rtx;
373 iv->mult = const1_rtx;
375 return true;
/* Evaluates application of subreg to MODE on IV.  */

static bool
iv_subreg (struct rtx_iv *iv, enum machine_mode mode)
{
  /* If iv is invariant, just calculate the new value.  */
  if (iv->step == const0_rtx
      && !iv->first_special)
    {
      rtx val = get_iv_value (iv, const0_rtx);
      val = lowpart_subreg (mode, val, iv->extend_mode);

      iv->base = val;
      iv->extend = UNKNOWN;
      iv->mode = iv->extend_mode = mode;
      iv->delta = const0_rtx;
      iv->mult = const1_rtx;
      return true;
    }

  if (iv->extend_mode == mode)
    return true;

  /* Cannot widen via a subreg of the iv's inner mode.  */
  if (GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (iv->mode))
    return false;

  iv->extend = UNKNOWN;
  iv->mode = mode;

  /* Fold the outer coefficients (delta, mult) into base and step; the
     subreg discards the extension they applied to.  */
  iv->base = simplify_gen_binary (PLUS, iv->extend_mode, iv->delta,
				  simplify_gen_binary (MULT, iv->extend_mode,
						       iv->base, iv->mult));
  iv->step = simplify_gen_binary (MULT, iv->extend_mode, iv->step, iv->mult);
  iv->mult = const1_rtx;
  iv->delta = const0_rtx;
  iv->first_special = false;

  return true;
}
/* Evaluates application of EXTEND to MODE on IV.  */

static bool
iv_extend (struct rtx_iv *iv, enum rtx_code extend, enum machine_mode mode)
{
  /* If iv is invariant, just calculate the new value.  */
  if (iv->step == const0_rtx
      && !iv->first_special)
    {
      rtx val = get_iv_value (iv, const0_rtx);
      val = simplify_gen_unary (extend, mode, val, iv->extend_mode);

      iv->base = val;
      iv->extend = UNKNOWN;
      iv->mode = iv->extend_mode = mode;
      iv->delta = const0_rtx;
      iv->mult = const1_rtx;
      return true;
    }

  /* A varying iv can only record an extension to its own extend_mode.  */
  if (mode != iv->extend_mode)
    return false;

  /* Only a single kind of extension is representable.  */
  if (iv->extend != UNKNOWN
      && iv->extend != extend)
    return false;

  iv->extend = extend;

  return true;
}
450 /* Evaluates negation of IV. */
452 static bool
453 iv_neg (struct rtx_iv *iv)
455 if (iv->extend == UNKNOWN)
457 iv->base = simplify_gen_unary (NEG, iv->extend_mode,
458 iv->base, iv->extend_mode);
459 iv->step = simplify_gen_unary (NEG, iv->extend_mode,
460 iv->step, iv->extend_mode);
462 else
464 iv->delta = simplify_gen_unary (NEG, iv->extend_mode,
465 iv->delta, iv->extend_mode);
466 iv->mult = simplify_gen_unary (NEG, iv->extend_mode,
467 iv->mult, iv->extend_mode);
470 return true;
/* Evaluates addition or subtraction (according to OP) of IV1 to IV0.  */

static bool
iv_add (struct rtx_iv *iv0, struct rtx_iv *iv1, enum rtx_code op)
{
  enum machine_mode mode;
  rtx arg;

  /* Extend the constant to extend_mode of the other operand if necessary.  */
  if (iv0->extend == UNKNOWN
      && iv0->mode == iv0->extend_mode
      && iv0->step == const0_rtx
      && GET_MODE_SIZE (iv0->extend_mode) < GET_MODE_SIZE (iv1->extend_mode))
    {
      iv0->extend_mode = iv1->extend_mode;
      iv0->base = simplify_gen_unary (ZERO_EXTEND, iv0->extend_mode,
				      iv0->base, iv0->mode);
    }
  if (iv1->extend == UNKNOWN
      && iv1->mode == iv1->extend_mode
      && iv1->step == const0_rtx
      && GET_MODE_SIZE (iv1->extend_mode) < GET_MODE_SIZE (iv0->extend_mode))
    {
      iv1->extend_mode = iv0->extend_mode;
      iv1->base = simplify_gen_unary (ZERO_EXTEND, iv1->extend_mode,
				      iv1->base, iv1->mode);
    }

  mode = iv0->extend_mode;
  if (mode != iv1->extend_mode)
    return false;

  /* Neither operand involves an extension: add componentwise.  */
  if (iv0->extend == UNKNOWN && iv1->extend == UNKNOWN)
    {
      if (iv0->mode != iv1->mode)
	return false;

      iv0->base = simplify_gen_binary (op, mode, iv0->base, iv1->base);
      iv0->step = simplify_gen_binary (op, mode, iv0->step, iv1->step);

      return true;
    }

  /* Handle addition of constant.  */
  if (iv1->extend == UNKNOWN
      && iv1->mode == mode
      && iv1->step == const0_rtx)
    {
      iv0->delta = simplify_gen_binary (op, mode, iv0->delta, iv1->base);
      return true;
    }

  /* Constant OP iv: rewrite as (+- iv) + constant.  */
  if (iv0->extend == UNKNOWN
      && iv0->mode == mode
      && iv0->step == const0_rtx)
    {
      arg = iv0->base;
      *iv0 = *iv1;
      if (op == MINUS
	  && !iv_neg (iv0))
	return false;

      iv0->delta = simplify_gen_binary (PLUS, mode, iv0->delta, arg);
      return true;
    }

  return false;
}
542 /* Evaluates multiplication of IV by constant CST. */
544 static bool
545 iv_mult (struct rtx_iv *iv, rtx mby)
547 enum machine_mode mode = iv->extend_mode;
549 if (GET_MODE (mby) != VOIDmode
550 && GET_MODE (mby) != mode)
551 return false;
553 if (iv->extend == UNKNOWN)
555 iv->base = simplify_gen_binary (MULT, mode, iv->base, mby);
556 iv->step = simplify_gen_binary (MULT, mode, iv->step, mby);
558 else
560 iv->delta = simplify_gen_binary (MULT, mode, iv->delta, mby);
561 iv->mult = simplify_gen_binary (MULT, mode, iv->mult, mby);
564 return true;
567 /* Evaluates shift of IV by constant CST. */
569 static bool
570 iv_shift (struct rtx_iv *iv, rtx mby)
572 enum machine_mode mode = iv->extend_mode;
574 if (GET_MODE (mby) != VOIDmode
575 && GET_MODE (mby) != mode)
576 return false;
578 if (iv->extend == UNKNOWN)
580 iv->base = simplify_gen_binary (ASHIFT, mode, iv->base, mby);
581 iv->step = simplify_gen_binary (ASHIFT, mode, iv->step, mby);
583 else
585 iv->delta = simplify_gen_binary (ASHIFT, mode, iv->delta, mby);
586 iv->mult = simplify_gen_binary (ASHIFT, mode, iv->mult, mby);
589 return true;
/* The recursive part of get_biv_step.  Gets the value of the single value
   defined by DEF wrto initial value of REG inside loop, in shape described
   at get_biv_step.  */

static bool
get_biv_step_1 (struct df_ref *def, rtx reg,
		rtx *inner_step, enum machine_mode *inner_mode,
		enum rtx_code *extend, enum machine_mode outer_mode,
		rtx *outer_step)
{
  rtx set, rhs, op0 = NULL_RTX, op1 = NULL_RTX;
  rtx next, nextr, tmp;
  enum rtx_code code;
  rtx insn = DF_REF_INSN (def);
  struct df_ref *next_def;
  enum iv_grd_result res;

  set = single_set (insn);
  if (!set)
    return false;

  /* Prefer a REG_EQUAL/REG_EQUIV note over the raw SET_SRC.  */
  rhs = find_reg_equal_equiv_note (insn);
  if (rhs)
    rhs = XEXP (rhs, 0);
  else
    rhs = SET_SRC (set);

  /* First switch: classify the rhs shape and determine NEXT, the
     register whose definition we follow next.  */
  code = GET_CODE (rhs);
  switch (code)
    {
    case SUBREG:
    case REG:
      next = rhs;
      break;

    case PLUS:
    case MINUS:
      op0 = XEXP (rhs, 0);
      op1 = XEXP (rhs, 1);

      /* Canonicalize so that the constant is op1.  */
      if (code == PLUS && CONSTANT_P (op0))
	{
	  tmp = op0; op0 = op1; op1 = tmp;
	}

      if (!simple_reg_p (op0)
	  || !CONSTANT_P (op1))
	return false;

      if (GET_MODE (rhs) != outer_mode)
	{
	  /* ppc64 uses expressions like

	     (set x:SI (plus:SI (subreg:SI y:DI) 1)).

	     this is equivalent to

	     (set x':DI (plus:DI y:DI 1))
	     (set x:SI (subreg:SI (x':DI)).  */
	  if (GET_CODE (op0) != SUBREG)
	    return false;
	  if (GET_MODE (SUBREG_REG (op0)) != outer_mode)
	    return false;
	}

      next = op0;
      break;

    case SIGN_EXTEND:
    case ZERO_EXTEND:
      if (GET_MODE (rhs) != outer_mode)
	return false;

      op0 = XEXP (rhs, 0);
      if (!simple_reg_p (op0))
	return false;

      next = op0;
      break;

    default:
      return false;
    }

  /* Strip a lowpart subreg to get the underlying register NEXTR.  */
  if (GET_CODE (next) == SUBREG)
    {
      if (!subreg_lowpart_p (next))
	return false;

      nextr = SUBREG_REG (next);
      if (GET_MODE (nextr) != outer_mode)
	return false;
    }
  else
    nextr = next;

  res = iv_get_reaching_def (insn, nextr, &next_def);

  if (res == GRD_INVALID || res == GRD_INVARIANT)
    return false;

  if (res == GRD_MAYBE_BIV)
    {
      /* We closed the cycle; it must close on REG itself.  Start the
	 accumulated description from scratch.  */
      if (!rtx_equal_p (nextr, reg))
	return false;

      *inner_step = const0_rtx;
      *extend = UNKNOWN;
      *inner_mode = outer_mode;
      *outer_step = const0_rtx;
    }
  else if (!get_biv_step_1 (next_def, reg,
			    inner_step, inner_mode, extend, outer_mode,
			    outer_step))
    return false;

  /* A subreg narrows the inner mode; fold the outer step back into the
     inner step, since the narrowing discards the extension.  */
  if (GET_CODE (next) == SUBREG)
    {
      enum machine_mode amode = GET_MODE (next);

      if (GET_MODE_SIZE (amode) > GET_MODE_SIZE (*inner_mode))
	return false;

      *inner_mode = amode;
      *inner_step = simplify_gen_binary (PLUS, outer_mode,
					 *inner_step, *outer_step);
      *outer_step = const0_rtx;
      *extend = UNKNOWN;
    }

  /* Second switch: account for the operation this insn performs on top
     of the recursively computed description.  */
  switch (code)
    {
    case REG:
    case SUBREG:
      break;

    case PLUS:
    case MINUS:
      if (*inner_mode == outer_mode
	  /* See comment in previous switch.  */
	  || GET_MODE (rhs) != outer_mode)
	*inner_step = simplify_gen_binary (code, outer_mode,
					   *inner_step, op1);
      else
	*outer_step = simplify_gen_binary (code, outer_mode,
					   *outer_step, op1);
      break;

    case SIGN_EXTEND:
    case ZERO_EXTEND:
      gcc_assert (GET_MODE (op0) == *inner_mode
		  && *extend == UNKNOWN
		  && *outer_step == const0_rtx);

      *extend = code;
      break;

    default:
      return false;
    }

  return true;
}
/* Gets the operation on register REG inside loop, in shape

   OUTER_STEP + EXTEND_{OUTER_MODE} (SUBREG_{INNER_MODE} (REG + INNER_STEP))

   If the operation cannot be described in this shape, return false.
   LAST_DEF is the definition of REG that dominates loop latch.  */

static bool
get_biv_step (struct df_ref *last_def, rtx reg, rtx *inner_step,
	      enum machine_mode *inner_mode, enum rtx_code *extend,
	      enum machine_mode *outer_mode, rtx *outer_step)
{
  *outer_mode = GET_MODE (reg);

  if (!get_biv_step_1 (last_def, reg,
		       inner_step, inner_mode, extend, *outer_mode,
		       outer_step))
    return false;

  /* Invariants of the returned shape: the modes differ exactly when an
     extension is present, and a nontrivial outer step requires
     differing modes.  */
  gcc_assert ((*inner_mode == *outer_mode) != (*extend != UNKNOWN));
  gcc_assert (*inner_mode != *outer_mode || *outer_step == const0_rtx);

  return true;
}
/* Records information that DEF is induction variable IV (a heap copy of
   IV is attached to DEF; clear_iv_info frees it).  */

static void
record_iv (struct df_ref *def, struct rtx_iv *iv)
{
  struct rtx_iv *recorded_iv = XNEW (struct rtx_iv);

  *recorded_iv = *iv;
  DF_REF_IV_SET (def, recorded_iv);
}
/* If DEF was already analyzed for bivness, store the description of the biv to
   IV and return true.  Otherwise return false.  */

static bool
analyzed_for_bivness_p (rtx def, struct rtx_iv *iv)
{
  /* The table is keyed by register number; REGNO (def) is the hash.  */
  struct biv_entry *biv = htab_find_with_hash (bivs, def, REGNO (def));

  if (!biv)
    return false;

  *iv = biv->iv;
  return true;
}
/* Records the biv description IV for register DEF in the BIVS hash
   table.  DEF must not have been recorded before.  */

static void
record_biv (rtx def, struct rtx_iv *iv)
{
  struct biv_entry *biv = XNEW (struct biv_entry);
  void **slot = htab_find_slot_with_hash (bivs, def, REGNO (def), INSERT);

  biv->regno = REGNO (def);
  biv->iv = *iv;
  /* Each register is analyzed at most once, so the slot must be empty.  */
  gcc_assert (!*slot);
  *slot = biv;
}
/* Determines whether DEF is a biv and if so, stores its description
   to *IV.  */

static bool
iv_analyze_biv (rtx def, struct rtx_iv *iv)
{
  rtx inner_step, outer_step;
  enum machine_mode inner_mode, outer_mode;
  enum rtx_code extend;
  struct df_ref *last_def;

  if (dump_file)
    {
      fprintf (dump_file, "Analyzing ");
      print_rtl (dump_file, def);
      fprintf (dump_file, " for bivness.\n");
    }

  /* Non-register operands can only be invariant constants.  */
  if (!REG_P (def))
    {
      if (!CONSTANT_P (def))
	return false;

      return iv_constant (iv, def, VOIDmode);
    }

  if (!latch_dominating_def (def, &last_def))
    {
      if (dump_file)
	fprintf (dump_file, "  not simple.\n");
      return false;
    }

  /* No definition inside the loop -- DEF is invariant.  */
  if (!last_def)
    return iv_constant (iv, def, VOIDmode);

  if (analyzed_for_bivness_p (def, iv))
    {
      if (dump_file)
	fprintf (dump_file, "  already analysed.\n");
      return iv->base != NULL_RTX;
    }

  if (!get_biv_step (last_def, def, &inner_step, &inner_mode, &extend,
		     &outer_mode, &outer_step))
    {
      /* Record the failure too (base == NULL), so it is cached.  */
      iv->base = NULL_RTX;
      goto end;
    }

  /* Loop transforms base to es (base + inner_step) + outer_step,
     where es means extend of subreg between inner_mode and outer_mode.
     The corresponding induction variable is

     es ((base - outer_step) + i * (inner_step + outer_step)) + outer_step  */

  iv->base = simplify_gen_binary (MINUS, outer_mode, def, outer_step);
  iv->step = simplify_gen_binary (PLUS, outer_mode, inner_step, outer_step);
  iv->mode = inner_mode;
  iv->extend_mode = outer_mode;
  iv->extend = extend;
  iv->mult = const1_rtx;
  iv->delta = outer_step;
  iv->first_special = inner_mode != outer_mode;

 end:
  if (dump_file)
    {
      fprintf (dump_file, "  ");
      dump_iv_info (dump_file, iv);
      fprintf (dump_file, "\n");
    }

  record_biv (def, iv);
  return iv->base != NULL_RTX;
}
/* Analyzes expression RHS used at INSN and stores the result to *IV.
   The mode of the induction variable is MODE.  */

bool
iv_analyze_expr (rtx insn, rtx rhs, enum machine_mode mode, struct rtx_iv *iv)
{
  rtx mby = NULL_RTX, tmp;
  rtx op0 = NULL_RTX, op1 = NULL_RTX;
  struct rtx_iv iv0, iv1;
  enum rtx_code code = GET_CODE (rhs);
  enum machine_mode omode = mode;

  iv->mode = VOIDmode;
  iv->base = NULL_RTX;
  iv->step = NULL_RTX;

  gcc_assert (GET_MODE (rhs) == mode || GET_MODE (rhs) == VOIDmode);

  /* Leaf operands (constants, registers, subregs) are handled directly
     by iv_analyze_op.  */
  if (CONSTANT_P (rhs)
      || REG_P (rhs)
      || code == SUBREG)
    {
      if (!iv_analyze_op (insn, rhs, iv))
	return false;

      /* A VOIDmode constant adopts the requested mode.  */
      if (iv->mode == VOIDmode)
	{
	  iv->mode = mode;
	  iv->extend_mode = mode;
	}

      return true;
    }

  /* Decompose the expression into operands.  */
  switch (code)
    {
    case REG:
      /* NOTE(review): REG is already caught by the REG_P early-return
	 above, so this case appears unreachable; kept for safety.  */
      op0 = rhs;
      break;

    case SIGN_EXTEND:
    case ZERO_EXTEND:
    case NEG:
      op0 = XEXP (rhs, 0);
      /* Analyze the operand in its own (inner) mode.  */
      omode = GET_MODE (op0);
      break;

    case PLUS:
    case MINUS:
      op0 = XEXP (rhs, 0);
      op1 = XEXP (rhs, 1);
      break;

    case MULT:
      op0 = XEXP (rhs, 0);
      mby = XEXP (rhs, 1);
      /* Canonicalize so that the constant multiplier is MBY.  */
      if (!CONSTANT_P (mby))
	{
	  tmp = op0;
	  op0 = mby;
	  mby = tmp;
	}
      if (!CONSTANT_P (mby))
	return false;
      break;

    case ASHIFT:
      op0 = XEXP (rhs, 0);
      mby = XEXP (rhs, 1);
      if (!CONSTANT_P (mby))
	return false;
      break;

    default:
      return false;
    }

  /* Recursively analyze the operands.  */
  if (op0
      && !iv_analyze_expr (insn, op0, omode, &iv0))
    return false;

  if (op1
      && !iv_analyze_expr (insn, op1, omode, &iv1))
    return false;

  /* Combine the operand descriptions according to the operation.  */
  switch (code)
    {
    case SIGN_EXTEND:
    case ZERO_EXTEND:
      if (!iv_extend (&iv0, code, mode))
	return false;
      break;

    case NEG:
      if (!iv_neg (&iv0))
	return false;
      break;

    case PLUS:
    case MINUS:
      if (!iv_add (&iv0, &iv1, code))
	return false;
      break;

    case MULT:
      if (!iv_mult (&iv0, mby))
	return false;
      break;

    case ASHIFT:
      if (!iv_shift (&iv0, mby))
	return false;
      break;

    default:
      break;
    }

  *iv = iv0;
  return iv->base != NULL_RTX;
}
/* Analyzes iv DEF and stores the result to *IV.  */

static bool
iv_analyze_def (struct df_ref *def, struct rtx_iv *iv)
{
  rtx insn = DF_REF_INSN (def);
  rtx reg = DF_REF_REG (def);
  rtx set, rhs;

  if (dump_file)
    {
      fprintf (dump_file, "Analysing def of ");
      print_rtl (dump_file, reg);
      fprintf (dump_file, " in insn ");
      print_rtl_single (dump_file, insn);
    }

  /* Return the cached description if we analyzed this def before.  */
  if (DF_REF_IV (def))
    {
      if (dump_file)
	fprintf (dump_file, "  already analysed.\n");
      *iv = *DF_REF_IV (def);
      return iv->base != NULL_RTX;
    }

  iv->mode = VOIDmode;
  iv->base = NULL_RTX;
  iv->step = NULL_RTX;

  /* Only a plain single set of the register itself can be analyzed.  */
  set = single_set (insn);
  if (!set || SET_DEST (set) != reg)
    return false;

  /* Prefer a REG_EQUAL/REG_EQUIV note over the raw SET_SRC.  */
  rhs = find_reg_equal_equiv_note (insn);
  if (rhs)
    rhs = XEXP (rhs, 0);
  else
    rhs = SET_SRC (set);

  /* Record the result even on failure, so it is cached.  */
  iv_analyze_expr (insn, rhs, GET_MODE (reg), iv);
  record_iv (def, iv);

  if (dump_file)
    {
      print_rtl (dump_file, reg);
      fprintf (dump_file, " in insn ");
      print_rtl_single (dump_file, insn);
      fprintf (dump_file, "  is ");
      dump_iv_info (dump_file, iv);
      fprintf (dump_file, "\n");
    }

  return iv->base != NULL_RTX;
}
/* Analyzes operand OP of INSN and stores the result to *IV.  */

static bool
iv_analyze_op (rtx insn, rtx op, struct rtx_iv *iv)
{
  struct df_ref *def = NULL;
  enum iv_grd_result res;

  if (dump_file)
    {
      fprintf (dump_file, "Analysing operand ");
      print_rtl (dump_file, op);
      fprintf (dump_file, " of insn ");
      print_rtl_single (dump_file, insn);
    }

  if (CONSTANT_P (op))
    res = GRD_INVARIANT;
  else if (GET_CODE (op) == SUBREG)
    {
      /* Analyze the inner register and then apply the subreg to the
	 resulting description.  */
      if (!subreg_lowpart_p (op))
	return false;

      if (!iv_analyze_op (insn, SUBREG_REG (op), iv))
	return false;

      return iv_subreg (iv, GET_MODE (op));
    }
  else
    {
      res = iv_get_reaching_def (insn, op, &def);
      if (res == GRD_INVALID)
	{
	  if (dump_file)
	    fprintf (dump_file, "  not simple.\n");
	  return false;
	}
    }

  if (res == GRD_INVARIANT)
    {
      iv_constant (iv, op, VOIDmode);

      if (dump_file)
	{
	  fprintf (dump_file, "  ");
	  dump_iv_info (dump_file, iv);
	  fprintf (dump_file, "\n");
	}
      return true;
    }

  if (res == GRD_MAYBE_BIV)
    return iv_analyze_biv (op, iv);

  return iv_analyze_def (def, iv);
}
/* Analyzes value VAL at INSN and stores the result to *IV.  */

bool
iv_analyze (rtx insn, rtx val, struct rtx_iv *iv)
{
  rtx reg;

  /* We must find the insn in that val is used, so that we get to UD chains.
     Since the function is sometimes called on result of get_condition,
     this does not necessarily have to be directly INSN; scan also the
     following insns.  */
  if (simple_reg_p (val))
    {
      if (GET_CODE (val) == SUBREG)
	reg = SUBREG_REG (val);
      else
	reg = val;

      /* NOTE(review): assumes a use of REG exists in some following insn;
	 otherwise this walks off the insn chain.  */
      while (!df_find_use (df, insn, reg))
	insn = NEXT_INSN (insn);
    }

  return iv_analyze_op (insn, val, iv);
}
/* Analyzes definition of DEF in INSN and stores the result to IV.  */

bool
iv_analyze_result (rtx insn, rtx def, struct rtx_iv *iv)
{
  struct df_ref *adef;

  adef = df_find_def (df, insn, def);
  if (!adef)
    return false;

  return iv_analyze_def (adef, iv);
}
/* Checks whether definition of register REG in INSN is a basic induction
   variable.  IV analysis must have been initialized (via a call to
   iv_analysis_loop_init) for this function to produce a result.  */

bool
biv_p (rtx insn, rtx reg)
{
  struct rtx_iv iv;
  struct df_ref *def, *last_def;

  if (!simple_reg_p (reg))
    return false;

  def = df_find_def (df, insn, reg);
  gcc_assert (def != NULL);
  /* The definition in INSN must be the one dominating the latch,
     otherwise it is not the biv-defining assignment.  */
  if (!latch_dominating_def (reg, &last_def))
    return false;
  if (last_def != def)
    return false;

  if (!iv_analyze_biv (reg, &iv))
    return false;

  /* An invariant (step 0) is not considered a biv.  */
  return iv.step != const0_rtx;
}
/* Calculates value of IV at ITERATION-th iteration.  */

/* NOTE(review): the return-type line was lost in extraction; `rtx'
   matches the values returned below -- confirm.  */
rtx
get_iv_value (struct rtx_iv *iv, rtx iteration)
{
  rtx val;

  /* We would need to generate some if_then_else patterns, and so far
     it is not needed anywhere.  */
  gcc_assert (!iv->first_special);

  /* base + step * iteration, skipping the multiplication when either
     factor is zero.  */
  if (iv->step != const0_rtx && iteration != const0_rtx)
    val = simplify_gen_binary (PLUS, iv->extend_mode, iv->base,
			       simplify_gen_binary (MULT, iv->extend_mode,
						    iv->step, iteration));
  else
    val = iv->base;

  if (iv->extend_mode == iv->mode)
    return val;

  /* Narrow to the inner mode, then apply the recorded extension and the
     outer coefficients: delta + mult * extend (val).  */
  val = lowpart_subreg (iv->mode, val, iv->extend_mode);

  if (iv->extend == UNKNOWN)
    return val;

  val = simplify_gen_unary (iv->extend, iv->extend_mode, val, iv->mode);
  val = simplify_gen_binary (PLUS, iv->extend_mode, iv->delta,
			     simplify_gen_binary (MULT, iv->extend_mode,
						  iv->mult, val));

  return val;
}
1230 /* Free the data for an induction variable analysis. */
1232 void
1233 iv_analysis_done (void)
1235 if (df)
1237 clear_iv_info ();
1238 df_finish (df);
1239 df = NULL;
1240 htab_delete (bivs);
1241 bivs = NULL;
1245 /* Computes inverse to X modulo (1 << MOD). */
1247 static unsigned HOST_WIDEST_INT
1248 inverse (unsigned HOST_WIDEST_INT x, int mod)
1250 unsigned HOST_WIDEST_INT mask =
1251 ((unsigned HOST_WIDEST_INT) 1 << (mod - 1) << 1) - 1;
1252 unsigned HOST_WIDEST_INT rslt = 1;
1253 int i;
1255 for (i = 0; i < mod - 1; i++)
1257 rslt = (rslt * x) & mask;
1258 x = (x * x) & mask;
1261 return rslt;
/* Checks whether register *REG is in set ALT (a regset passed as
   void *).  Callback for for_each_rtx.  */

static int
altered_reg_used (rtx *reg, void *alt)
{
  if (!REG_P (*reg))
    return 0;

  return REGNO_REG_SET_P (alt, REGNO (*reg));
}
/* Marks registers altered by EXPR in set ALT (a regset passed as
   void *).  Callback for note_stores.  */

static void
mark_altered (rtx expr, rtx by ATTRIBUTE_UNUSED, void *alt)
{
  /* A store through a subreg alters the underlying register.  */
  if (GET_CODE (expr) == SUBREG)
    expr = SUBREG_REG (expr);
  if (!REG_P (expr))
    return;

  SET_REGNO_REG_SET (alt, REGNO (expr));
}
1288 /* Checks whether RHS is simple enough to process. */
1290 static bool
1291 simple_rhs_p (rtx rhs)
1293 rtx op0, op1;
1295 if (CONSTANT_P (rhs)
1296 || REG_P (rhs))
1297 return true;
1299 switch (GET_CODE (rhs))
1301 case PLUS:
1302 case MINUS:
1303 op0 = XEXP (rhs, 0);
1304 op1 = XEXP (rhs, 1);
1305 /* Allow reg + const sets only. */
1306 if (REG_P (op0) && CONSTANT_P (op1))
1307 return true;
1308 if (REG_P (op1) && CONSTANT_P (op0))
1309 return true;
1311 return false;
1313 default:
1314 return false;
/* Simplifies *EXPR using assignment in INSN.  ALTERED is the set of registers
   altered so far.  */

static void
simplify_using_assignment (rtx insn, rtx *expr, regset altered)
{
  rtx set = single_set (insn);
  rtx lhs = NULL_RTX, rhs;
  /* RET records that this insn cannot be used for substitution; the
     register kills below must still be performed in that case.  */
  bool ret = false;

  if (set)
    {
      lhs = SET_DEST (set);
      /* Only a register destination not already altered is usable.  */
      if (!REG_P (lhs)
	  || altered_reg_used (&lhs, altered))
	ret = true;
    }
  else
    ret = true;

  /* Record all registers this insn modifies.  */
  note_stores (PATTERN (insn), mark_altered, altered);
  if (CALL_P (insn))
    {
      int i;

      /* Kill all call clobbered registers.  */
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
	  SET_REGNO_REG_SET (altered, i);
    }

  if (ret)
    return;

  /* Prefer a REG_EQUAL/REG_EQUIV note over the raw SET_SRC.  */
  rhs = find_reg_equal_equiv_note (insn);
  if (rhs)
    rhs = XEXP (rhs, 0);
  else
    rhs = SET_SRC (set);

  if (!simple_rhs_p (rhs))
    return;

  /* Do not substitute a value that itself depends on altered registers.  */
  if (for_each_rtx (&rhs, altered_reg_used, altered))
    return;

  *expr = simplify_replace_rtx (*expr, lhs, rhs);
}
1367 /* Checks whether A implies B. */
1369 static bool
1370 implies_p (rtx a, rtx b)
1372 rtx op0, op1, opb0, opb1, r;
1373 enum machine_mode mode;
/* If A is an equality, try substituting one side of it into B; if
   that folds B to true, A implies B.  */
1375 if (GET_CODE (a) == EQ)
1377 op0 = XEXP (a, 0);
1378 op1 = XEXP (a, 1);
1380 if (REG_P (op0))
1382 r = simplify_replace_rtx (b, op0, op1);
1383 if (r == const_true_rtx)
1384 return true;
1387 if (REG_P (op1))
1389 r = simplify_replace_rtx (b, op1, op0);
1390 if (r == const_true_rtx)
1391 return true;
/* Anything implies a trivially true B.  */
1395 if (b == const_true_rtx)
1396 return true;
/* The remaining rules only make sense when both A and B are
   comparisons.  */
1398 if ((GET_RTX_CLASS (GET_CODE (a)) != RTX_COMM_COMPARE
1399 && GET_RTX_CLASS (GET_CODE (a)) != RTX_COMPARE)
1400 || (GET_RTX_CLASS (GET_CODE (b)) != RTX_COMM_COMPARE
1401 && GET_RTX_CLASS (GET_CODE (b)) != RTX_COMPARE))
1402 return false;
1404 op0 = XEXP (a, 0);
1405 op1 = XEXP (a, 1);
1406 opb0 = XEXP (b, 0);
1407 opb1 = XEXP (b, 1);
/* Determine a mode common to A's and B's operands; MODE stays
   VOIDmode when they do not agree.  */
1409 mode = GET_MODE (op0);
1410 if (mode != GET_MODE (opb0))
1411 mode = VOIDmode;
1412 else if (mode == VOIDmode)
1414 mode = GET_MODE (op1);
1415 if (mode != GET_MODE (opb1))
1416 mode = VOIDmode;
1419 /* A < B implies A + 1 <= B. */
1420 if ((GET_CODE (a) == GT || GET_CODE (a) == LT)
1421 && (GET_CODE (b) == GE || GET_CODE (b) == LE))
/* Canonicalize both comparisons so the smaller operand comes first,
   swapping operands of the GT/GE forms.  */
1424 if (GET_CODE (a) == GT)
1426 r = op0;
1427 op0 = op1;
1428 op1 = r;
1431 if (GET_CODE (b) == GE)
1433 r = opb0;
1434 opb0 = opb1;
1435 opb1 = r;
/* Now A is op0 < op1 and B is opb0 <= opb1; the implication holds
   when op1 == opb1 and opb0 == op0 + 1.  */
1438 if (SCALAR_INT_MODE_P (mode)
1439 && rtx_equal_p (op1, opb1)
1440 && simplify_gen_binary (MINUS, mode, opb0, op0) == const1_rtx)
1441 return true;
1442 return false;
1445 /* A < B or A > B imply A != B. TODO: Likewise
1446 A + n < B implies A != B + n if neither wraps. */
1447 if (GET_CODE (b) == NE
1448 && (GET_CODE (a) == GT || GET_CODE (a) == GTU
1449 || GET_CODE (a) == LT || GET_CODE (a) == LTU))
1451 if (rtx_equal_p (op0, opb0)
1452 && rtx_equal_p (op1, opb1))
1453 return true;
1456 /* For unsigned comparisons, A != 0 implies A > 0 and A >= 1. */
1457 if (GET_CODE (a) == NE
1458 && op1 == const0_rtx)
1460 if ((GET_CODE (b) == GTU
1461 && opb1 == const0_rtx)
1462 || (GET_CODE (b) == GEU
1463 && opb1 == const1_rtx))
1464 return rtx_equal_p (op0, opb0);
1467 /* A != N is equivalent to A - (N + 1) <u -1. */
1468 if (GET_CODE (a) == NE
1469 && GET_CODE (op1) == CONST_INT
1470 && GET_CODE (b) == LTU
1471 && opb1 == constm1_rtx
1472 && GET_CODE (opb0) == PLUS
1473 && GET_CODE (XEXP (opb0, 1)) == CONST_INT
1474 /* Avoid overflows. */
1475 && ((unsigned HOST_WIDE_INT) INTVAL (XEXP (opb0, 1))
1476 != ((unsigned HOST_WIDE_INT)1
1477 << (HOST_BITS_PER_WIDE_INT - 1)) - 1)
1478 && INTVAL (XEXP (opb0, 1)) + 1 == -INTVAL (op1))
1479 return rtx_equal_p (op0, XEXP (opb0, 0));
1481 /* Likewise, A != N implies A - N > 0. */
1482 if (GET_CODE (a) == NE
1483 && GET_CODE (op1) == CONST_INT)
/* Two recognized shapes for B: (A + C) >u 0 or (A + C) >=u 1,
   both with C == -N.  */
1485 if (GET_CODE (b) == GTU
1486 && GET_CODE (opb0) == PLUS
1487 && opb1 == const0_rtx
1488 && GET_CODE (XEXP (opb0, 1)) == CONST_INT
1489 /* Avoid overflows. */
1490 && ((unsigned HOST_WIDE_INT) INTVAL (XEXP (opb0, 1))
1491 != ((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1)))
1492 && rtx_equal_p (XEXP (opb0, 0), op0))
1493 return INTVAL (op1) == -INTVAL (XEXP (opb0, 1));
1494 if (GET_CODE (b) == GEU
1495 && GET_CODE (opb0) == PLUS
1496 && opb1 == const1_rtx
1497 && GET_CODE (XEXP (opb0, 1)) == CONST_INT
1498 /* Avoid overflows. */
1499 && ((unsigned HOST_WIDE_INT) INTVAL (XEXP (opb0, 1))
1500 != ((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1)))
1501 && rtx_equal_p (XEXP (opb0, 0), op0))
1502 return INTVAL (op1) == -INTVAL (XEXP (opb0, 1));
1505 /* A >s X, where X is positive, implies A <u Y, if Y is negative. */
1506 if ((GET_CODE (a) == GT || GET_CODE (a) == GE)
1507 && GET_CODE (op1) == CONST_INT
1508 && ((GET_CODE (a) == GT && op1 == constm1_rtx)
1509 || INTVAL (op1) >= 0)
1510 && GET_CODE (b) == LTU
1511 && GET_CODE (opb1) == CONST_INT)
/* A is known nonnegative; interpreted unsigned it is below any
   value whose sign bit is set.  */
1512 return INTVAL (opb1) < 0;
/* No rule matched; conservatively report no implication.  */
1514 return false;
1517 /* Canonicalizes COND so that
1519 (1) Ensure that operands are ordered according to
1520 swap_commutative_operands_p.
1521 (2) (LE x const) will be replaced with (LT x <const+1>) and similarly
1522 for GE, GEU, and LEU. */
/* NOTE(review): the "static rtx" return-type line appears to have been
   lost from this excerpt; the function returns the (possibly rebuilt)
   condition as an SImode comparison rtx.  */
1525 canon_condition (rtx cond)
1527 rtx tem;
1528 rtx op0, op1;
1529 enum rtx_code code;
1530 enum machine_mode mode;
1532 code = GET_CODE (cond);
1533 op0 = XEXP (cond, 0);
1534 op1 = XEXP (cond, 1);
/* (1): put the operands in canonical order, flipping the comparison
   code to compensate.  */
1536 if (swap_commutative_operands_p (op0, op1))
1538 code = swap_condition (code);
1539 tem = op0;
1540 op0 = op1;
1541 op1 = tem;
1544 mode = GET_MODE (op0);
1545 if (mode == VOIDmode)
1546 mode = GET_MODE (op1);
1547 gcc_assert (mode != VOIDmode);
/* (2): turn non-strict comparisons against a constant into strict
   ones, guarding each case against stepping the constant outside the
   representable range of MODE.  */
1549 if (GET_CODE (op1) == CONST_INT
1550 && GET_MODE_CLASS (mode) != MODE_CC
1551 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1553 HOST_WIDE_INT const_val = INTVAL (op1);
1554 unsigned HOST_WIDE_INT uconst_val = const_val;
1555 unsigned HOST_WIDE_INT max_val
1556 = (unsigned HOST_WIDE_INT) GET_MODE_MASK (mode);
1558 switch (code)
1560 case LE:
/* x <= C becomes x < C+1 unless C is the signed maximum.  */
1561 if ((unsigned HOST_WIDE_INT) const_val != max_val >> 1)
1562 code = LT, op1 = gen_int_mode (const_val + 1, GET_MODE (op0));
1563 break;
1565 /* When cross-compiling, const_val might be sign-extended from
1566 BITS_PER_WORD to HOST_BITS_PER_WIDE_INT */
1567 case GE:
/* x >= C becomes x > C-1 unless C is the signed minimum.  */
1568 if ((HOST_WIDE_INT) (const_val & max_val)
1569 != (((HOST_WIDE_INT) 1
1570 << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1))))
1571 code = GT, op1 = gen_int_mode (const_val - 1, mode);
1572 break;
1574 case LEU:
/* x <=u C becomes x <u C+1 unless C is the unsigned maximum.  */
1575 if (uconst_val < max_val)
1576 code = LTU, op1 = gen_int_mode (uconst_val + 1, mode);
1577 break;
1579 case GEU:
/* x >=u C becomes x >u C-1 unless C is zero.  */
1580 if (uconst_val != 0)
1581 code = GTU, op1 = gen_int_mode (uconst_val - 1, mode);
1582 break;
1584 default:
1585 break;
/* Only build a fresh rtx when something actually changed.  */
1589 if (op0 != XEXP (cond, 0)
1590 || op1 != XEXP (cond, 1)
1591 || code != GET_CODE (cond)
1592 || GET_MODE (cond) != SImode)
1593 cond = gen_rtx_fmt_ee (code, SImode, op0, op1);
1595 return cond;
1598 /* Tries to use the fact that COND holds to simplify EXPR. ALTERED is the
1599 set of altered regs. */
/* On success *EXPR is folded to const_true_rtx or const0_rtx; otherwise
   it is left untouched.  ALTERED may be NULL.  */
1601 void
1602 simplify_using_condition (rtx cond, rtx *expr, regset altered)
1604 rtx rev, reve, exp = *expr;
1606 if (!COMPARISON_P (exp))
1607 return;
1609 /* If some register gets altered later, we do not really speak about its
1610 value at the time of comparison. */
1611 if (altered
1612 && for_each_rtx (&cond, altered_reg_used, altered))
1613 return;
/* REV and REVE are the logical negations of COND and EXP; either may
   be NULL_RTX when the comparison is not reversible.  */
1615 rev = reversed_condition (cond);
1616 reve = reversed_condition (exp);
/* Canonicalize all four so structural comparison (rtx_equal_p) and
   implies_p see them in the same shape.  */
1618 cond = canon_condition (cond);
1619 exp = canon_condition (exp);
1620 if (rev)
1621 rev = canon_condition (rev);
1622 if (reve)
1623 reve = canon_condition (reve);
/* EXP is exactly COND, hence true.  */
1625 if (rtx_equal_p (exp, cond))
1627 *expr = const_true_rtx;
1628 return;
/* EXP is exactly the negation of COND, hence false.  */
1632 if (rev && rtx_equal_p (exp, rev))
1634 *expr = const0_rtx;
1635 return;
/* COND implies EXP, hence EXP is true.  */
1638 if (implies_p (cond, exp))
1640 *expr = const_true_rtx;
1641 return;
/* COND implies the negation of EXP, hence EXP is false.  */
1644 if (reve && implies_p (cond, reve))
1646 *expr = const0_rtx;
1647 return;
1650 /* A proof by contradiction. If *EXPR implies (not cond), *EXPR must
1651 be false. */
1652 if (rev && implies_p (exp, rev))
1654 *expr = const0_rtx;
1655 return;
1658 /* Similarly, If (not *EXPR) implies (not cond), *EXPR must be true. */
1659 if (rev && reve && implies_p (reve, rev))
1661 *expr = const_true_rtx;
1662 return;
1665 /* We would like to have some other tests here. TODO. */
1667 return;
1670 /* Use relationship between A and *B to eventually eliminate *B.
1671 OP is the operation we consider. */
1673 static void
1674 eliminate_implied_condition (enum rtx_code op, rtx a, rtx *b)
1676 switch (op)
1678 case AND:
1679 /* If A implies *B, we may replace *B by true. */
1680 if (implies_p (a, *b))
1681 *b = const_true_rtx;
1682 break;
1684 case IOR:
1685 /* If *B implies A, we may replace *B by false. */
1686 if (implies_p (*b, a))
1687 *b = const0_rtx;
1688 break;
1690 default:
1691 gcc_unreachable ();
1695 /* Eliminates the conditions in TAIL that are implied by HEAD. OP is the
1696 operation we consider. */
1698 static void
1699 eliminate_implied_conditions (enum rtx_code op, rtx *head, rtx tail)
1701 rtx elt;
1703 for (elt = tail; elt; elt = XEXP (elt, 1))
1704 eliminate_implied_condition (op, *head, &XEXP (elt, 0));
1705 for (elt = tail; elt; elt = XEXP (elt, 1))
1706 eliminate_implied_condition (op, XEXP (elt, 0), head);
1709 /* Simplifies *EXPR using initial values at the start of the LOOP. If *EXPR
1710 is a list, its elements are assumed to be combined using OP. */
1712 static void
1713 simplify_using_initial_values (struct loop *loop, enum rtx_code op, rtx *expr)
1715 rtx head, tail, insn;
1716 rtx neutral, aggr;
1717 regset altered;
1718 edge e;
1720 if (!*expr)
1721 return;
1723 if (CONSTANT_P (*expr))
1724 return;
1726 if (GET_CODE (*expr) == EXPR_LIST)
1728 head = XEXP (*expr, 0);
1729 tail = XEXP (*expr, 1);
1731 eliminate_implied_conditions (op, &head, tail);
1733 switch (op)
1735 case AND:
1736 neutral = const_true_rtx;
1737 aggr = const0_rtx;
1738 break;
1740 case IOR:
1741 neutral = const0_rtx;
1742 aggr = const_true_rtx;
1743 break;
1745 default:
1746 gcc_unreachable ();
1749 simplify_using_initial_values (loop, UNKNOWN, &head);
1750 if (head == aggr)
1752 XEXP (*expr, 0) = aggr;
1753 XEXP (*expr, 1) = NULL_RTX;
1754 return;
1756 else if (head == neutral)
1758 *expr = tail;
1759 simplify_using_initial_values (loop, op, expr);
1760 return;
1762 simplify_using_initial_values (loop, op, &tail);
1764 if (tail && XEXP (tail, 0) == aggr)
1766 *expr = tail;
1767 return;
1770 XEXP (*expr, 0) = head;
1771 XEXP (*expr, 1) = tail;
1772 return;
1775 gcc_assert (op == UNKNOWN);
1777 e = loop_preheader_edge (loop);
1778 if (e->src == ENTRY_BLOCK_PTR)
1779 return;
1781 altered = ALLOC_REG_SET (&reg_obstack);
1783 while (1)
1785 insn = BB_END (e->src);
1786 if (any_condjump_p (insn))
1788 rtx cond = get_condition (BB_END (e->src), NULL, false, true);
1790 if (cond && (e->flags & EDGE_FALLTHRU))
1791 cond = reversed_condition (cond);
1792 if (cond)
1794 simplify_using_condition (cond, expr, altered);
1795 if (CONSTANT_P (*expr))
1797 FREE_REG_SET (altered);
1798 return;
1803 FOR_BB_INSNS_REVERSE (e->src, insn)
1805 if (!INSN_P (insn))
1806 continue;
1808 simplify_using_assignment (insn, expr, altered);
1809 if (CONSTANT_P (*expr))
1811 FREE_REG_SET (altered);
1812 return;
1814 if (for_each_rtx (expr, altered_reg_used, altered))
1815 return;
1818 if (!single_pred_p (e->src)
1819 || single_pred (e->src) == ENTRY_BLOCK_PTR)
1820 break;
1821 e = single_pred_edge (e->src);
1824 FREE_REG_SET (altered);
1827 /* Transforms invariant IV into MODE. Adds assumptions based on the fact
1828 that IV occurs as left operands of comparison COND and its signedness
1829 is SIGNED_P to DESC. */
1831 static void
1832 shorten_into_mode (struct rtx_iv *iv, enum machine_mode mode,
1833 enum rtx_code cond, bool signed_p, struct niter_desc *desc)
1835 rtx mmin, mmax, cond_over, cond_under;
/* COND_UNDER / COND_OVER are the conditions under which IV's value
   falls below / above the range representable in MODE.  */
1837 get_mode_bounds (mode, signed_p, iv->extend_mode, &mmin, &mmax);
1838 cond_under = simplify_gen_relational (LT, SImode, iv->extend_mode,
1839 iv->base, mmin);
1840 cond_over = simplify_gen_relational (GT, SImode, iv->extend_mode,
1841 iv->base, mmax);
/* Depending on the direction of COND, an out-of-range invariant
   either keeps the exit test failing forever (record in desc->infinite)
   or makes it fail immediately (record in desc->noloop_assumptions).  */
1843 switch (cond)
1845 case LE:
1846 case LT:
1847 case LEU:
1848 case LTU:
1849 if (cond_under != const0_rtx)
1850 desc->infinite =
1851 alloc_EXPR_LIST (0, cond_under, desc->infinite);
1852 if (cond_over != const0_rtx)
1853 desc->noloop_assumptions =
1854 alloc_EXPR_LIST (0, cond_over, desc->noloop_assumptions);
1855 break;
1857 case GE:
1858 case GT:
1859 case GEU:
1860 case GTU:
1861 if (cond_over != const0_rtx)
1862 desc->infinite =
1863 alloc_EXPR_LIST (0, cond_over, desc->infinite);
1864 if (cond_under != const0_rtx)
1865 desc->noloop_assumptions =
1866 alloc_EXPR_LIST (0, cond_under, desc->noloop_assumptions);
1867 break;
1869 case NE:
/* For NE either direction of overflow makes the exit condition
   permanently true, i.e. the loop infinite.  */
1870 if (cond_over != const0_rtx)
1871 desc->infinite =
1872 alloc_EXPR_LIST (0, cond_over, desc->infinite);
1873 if (cond_under != const0_rtx)
1874 desc->infinite =
1875 alloc_EXPR_LIST (0, cond_under, desc->infinite);
1876 break;
1878 default:
1879 gcc_unreachable ();
/* Commit the narrowing; the value is now interpreted in MODE with the
   signedness of the comparison.  */
1882 iv->mode = mode;
1883 iv->extend = signed_p ? SIGN_EXTEND : ZERO_EXTEND;
1886 /* Transforms IV0 and IV1 compared by COND so that they are both compared as
1887 subregs of the same mode if possible (sometimes it is necessary to add
1888 some assumptions to DESC). */
/* Returns true on success; on failure the ivs may be partially
   modified but DESC->mode/signed_p are not set.  */
1890 static bool
1891 canonicalize_iv_subregs (struct rtx_iv *iv0, struct rtx_iv *iv1,
1892 enum rtx_code cond, struct niter_desc *desc)
1894 enum machine_mode comp_mode;
1895 bool signed_p;
1897 /* If the ivs behave specially in the first iteration, or are
1898 added/multiplied after extending, we ignore them. */
1899 if (iv0->first_special || iv0->mult != const1_rtx || iv0->delta != const0_rtx)
1900 return false;
1901 if (iv1->first_special || iv1->mult != const1_rtx || iv1->delta != const0_rtx)
1902 return false;
1904 /* If there is some extend, it must match signedness of the comparison. */
1905 switch (cond)
1907 case LE:
1908 case LT:
1909 if (iv0->extend == ZERO_EXTEND
1910 || iv1->extend == ZERO_EXTEND)
1911 return false;
1912 signed_p = true;
1913 break;
1915 case LEU:
1916 case LTU:
1917 if (iv0->extend == SIGN_EXTEND
1918 || iv1->extend == SIGN_EXTEND)
1919 return false;
1920 signed_p = false;
1921 break;
1923 case NE:
/* NE carries no signedness of its own; the extends just have to
   agree, and whichever is present decides SIGNED_P.  */
1924 if (iv0->extend != UNKNOWN
1925 && iv1->extend != UNKNOWN
1926 && iv0->extend != iv1->extend)
1927 return false;
1929 signed_p = false;
1930 if (iv0->extend != UNKNOWN)
1931 signed_p = iv0->extend == SIGN_EXTEND;
1932 if (iv1->extend != UNKNOWN)
1933 signed_p = iv1->extend == SIGN_EXTEND;
1934 break;
1936 default:
1937 gcc_unreachable ();
1940 /* Values of both variables should be computed in the same mode. These
1941 might indeed be different, if we have comparison like
1943 (compare (subreg:SI (iv0)) (subreg:SI (iv1)))
1945 and iv0 and iv1 are both ivs iterating in SI mode, but calculated
1946 in different modes. This does not seem impossible to handle, but
1947 it hardly ever occurs in practice.
1949 The only exception is the case when one of operands is invariant.
1950 For example pentium 3 generates comparisons like
1951 (lt (subreg:HI (reg:SI)) 100). Here we assign HImode to 100, but we
1952 definitely do not want this prevent the optimization. */
/* COMP_MODE is the wider of the two extend modes.  */
1953 comp_mode = iv0->extend_mode;
1954 if (GET_MODE_BITSIZE (comp_mode) < GET_MODE_BITSIZE (iv1->extend_mode))
1955 comp_mode = iv1->extend_mode;
/* Only an invariant (step 0, not already a subreg of a wider value)
   may be widened to COMP_MODE.  */
1957 if (iv0->extend_mode != comp_mode)
1959 if (iv0->mode != iv0->extend_mode
1960 || iv0->step != const0_rtx)
1961 return false;
1963 iv0->base = simplify_gen_unary (signed_p ? SIGN_EXTEND : ZERO_EXTEND,
1964 comp_mode, iv0->base, iv0->mode);
1965 iv0->extend_mode = comp_mode;
1968 if (iv1->extend_mode != comp_mode)
1970 if (iv1->mode != iv1->extend_mode
1971 || iv1->step != const0_rtx)
1972 return false;
1974 iv1->base = simplify_gen_unary (signed_p ? SIGN_EXTEND : ZERO_EXTEND,
1975 comp_mode, iv1->base, iv1->mode);
1976 iv1->extend_mode = comp_mode;
1979 /* Check that both ivs belong to a range of a single mode. If one of the
1980 operands is an invariant, we may need to shorten it into the common
1981 mode. */
1982 if (iv0->mode == iv0->extend_mode
1983 && iv0->step == const0_rtx
1984 && iv0->mode != iv1->mode)
1985 shorten_into_mode (iv0, iv1->mode, cond, signed_p, desc);
1987 if (iv1->mode == iv1->extend_mode
1988 && iv1->step == const0_rtx
1989 && iv0->mode != iv1->mode)
/* IV1 is the right operand, so the condition must be swapped for
   shorten_into_mode's left-operand convention.  */
1990 shorten_into_mode (iv1, iv0->mode, swap_condition (cond), signed_p, desc);
1992 if (iv0->mode != iv1->mode)
1993 return false;
1995 desc->mode = iv0->mode;
1996 desc->signed_p = signed_p;
1998 return true;
2001 /* Tries to estimate the maximum number of iterations. */
/* Also stores the estimate into DESC->niter_max before returning it.  */
2003 static unsigned HOST_WIDEST_INT
2004 determine_max_iter (struct loop *loop, struct niter_desc *desc)
2006 rtx niter = desc->niter_expr;
2007 rtx mmin, mmax, cmp;
2008 unsigned HOST_WIDEST_INT nmax, inc;
/* (AND niter C) with C + 1 a power of two bounds the count by C.  */
2010 if (GET_CODE (niter) == AND
2011 && GET_CODE (XEXP (niter, 0)) == CONST_INT)
2013 nmax = INTVAL (XEXP (niter, 0));
2014 if (!(nmax & (nmax + 1)))
2016 desc->niter_max = nmax;
2017 return nmax;
/* Fall back to the full range of the iteration mode.  */
2021 get_mode_bounds (desc->mode, desc->signed_p, desc->mode, &mmin, &mmax);
2022 nmax = INTVAL (mmax) - INTVAL (mmin);
/* A division by a constant divides the bound as well.  */
2024 if (GET_CODE (niter) == UDIV)
2026 if (GET_CODE (XEXP (niter, 1)) != CONST_INT)
2028 desc->niter_max = nmax;
2029 return nmax;
2031 inc = INTVAL (XEXP (niter, 1));
2032 niter = XEXP (niter, 0);
2034 else
2035 inc = 1;
2037 /* We could use a binary search here, but for now improving the upper
2038 bound by just one eliminates one important corner case. */
2039 cmp = gen_rtx_fmt_ee (desc->signed_p ? LT : LTU, VOIDmode, niter, mmax);
2040 simplify_using_initial_values (loop, UNKNOWN, &cmp);
2041 if (cmp == const_true_rtx)
2043 nmax--;
2045 if (dump_file)
2046 fprintf (dump_file, ";; improved upper bound by one.\n");
2048 desc->niter_max = nmax / inc;
2049 return nmax / inc;
2052 /* Computes number of iterations of the CONDITION in INSN in LOOP and stores
2053 the result into DESC. Very similar to determine_number_of_iterations
2054 (basically its rtl version), complicated by things like subregs. */
2056 static void
2057 iv_number_of_iterations (struct loop *loop, rtx insn, rtx condition,
2058 struct niter_desc *desc)
2060 rtx op0, op1, delta, step, bound, may_xform, tmp, tmp0, tmp1;
2061 struct rtx_iv iv0, iv1, tmp_iv;
2062 rtx assumption, may_not_xform;
2063 enum rtx_code cond;
2064 enum machine_mode mode, comp_mode;
2065 rtx mmin, mmax, mode_mmin, mode_mmax;
2066 unsigned HOST_WIDEST_INT s, size, d, inv;
2067 HOST_WIDEST_INT up, down, inc, step_val;
2068 int was_sharp = false;
2069 rtx old_niter;
2070 bool step_is_pow2;
2072 /* The meaning of these assumptions is this:
2073 if !assumptions
2074 then the rest of information does not have to be valid
2075 if noloop_assumptions then the loop does not roll
2076 if infinite then this exit is never used */
2078 desc->assumptions = NULL_RTX;
2079 desc->noloop_assumptions = NULL_RTX;
2080 desc->infinite = NULL_RTX;
2081 desc->simple_p = true;
2083 desc->const_iter = false;
2084 desc->niter_expr = NULL_RTX;
2085 desc->niter_max = 0;
2087 cond = GET_CODE (condition);
2088 gcc_assert (COMPARISON_P (condition));
2090 mode = GET_MODE (XEXP (condition, 0));
2091 if (mode == VOIDmode)
2092 mode = GET_MODE (XEXP (condition, 1));
2093 /* The constant comparisons should be folded. */
2094 gcc_assert (mode != VOIDmode);
2096 /* We only handle integers or pointers. */
2097 if (GET_MODE_CLASS (mode) != MODE_INT
2098 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
2099 goto fail;
2101 op0 = XEXP (condition, 0);
2102 if (!iv_analyze (insn, op0, &iv0))
2103 goto fail;
2104 if (iv0.extend_mode == VOIDmode)
2105 iv0.mode = iv0.extend_mode = mode;
2107 op1 = XEXP (condition, 1);
2108 if (!iv_analyze (insn, op1, &iv1))
2109 goto fail;
2110 if (iv1.extend_mode == VOIDmode)
2111 iv1.mode = iv1.extend_mode = mode;
2113 if (GET_MODE_BITSIZE (iv0.extend_mode) > HOST_BITS_PER_WIDE_INT
2114 || GET_MODE_BITSIZE (iv1.extend_mode) > HOST_BITS_PER_WIDE_INT)
2115 goto fail;
2117 /* Check condition and normalize it. */
2119 switch (cond)
2121 case GE:
2122 case GT:
2123 case GEU:
2124 case GTU:
2125 tmp_iv = iv0; iv0 = iv1; iv1 = tmp_iv;
2126 cond = swap_condition (cond);
2127 break;
2128 case NE:
2129 case LE:
2130 case LEU:
2131 case LT:
2132 case LTU:
2133 break;
2134 default:
2135 goto fail;
2138 /* Handle extends. This is relatively nontrivial, so we only try in some
2139 easy cases, when we can canonicalize the ivs (possibly by adding some
2140 assumptions) to shape subreg (base + i * step). This function also fills
2141 in desc->mode and desc->signed_p. */
2143 if (!canonicalize_iv_subregs (&iv0, &iv1, cond, desc))
2144 goto fail;
2146 comp_mode = iv0.extend_mode;
2147 mode = iv0.mode;
2148 size = GET_MODE_BITSIZE (mode);
2149 get_mode_bounds (mode, (cond == LE || cond == LT), comp_mode, &mmin, &mmax);
2150 mode_mmin = lowpart_subreg (mode, mmin, comp_mode);
2151 mode_mmax = lowpart_subreg (mode, mmax, comp_mode);
2153 if (GET_CODE (iv0.step) != CONST_INT || GET_CODE (iv1.step) != CONST_INT)
2154 goto fail;
2156 /* We can take care of the case of two induction variables chasing each other
2157 if the test is NE. I have never seen a loop using it, but still it is
2158 cool. */
2159 if (iv0.step != const0_rtx && iv1.step != const0_rtx)
2161 if (cond != NE)
2162 goto fail;
2164 iv0.step = simplify_gen_binary (MINUS, comp_mode, iv0.step, iv1.step);
2165 iv1.step = const0_rtx;
2168 /* This is either infinite loop or the one that ends immediately, depending
2169 on initial values. Unswitching should remove this kind of conditions. */
2170 if (iv0.step == const0_rtx && iv1.step == const0_rtx)
2171 goto fail;
2173 if (cond != NE)
2175 if (iv0.step == const0_rtx)
2176 step_val = -INTVAL (iv1.step);
2177 else
2178 step_val = INTVAL (iv0.step);
2180 /* Ignore loops of while (i-- < 10) type. */
2181 if (step_val < 0)
2182 goto fail;
2184 step_is_pow2 = !(step_val & (step_val - 1));
2186 else
2188 /* We do not care about whether the step is power of two in this
2189 case. */
2190 step_is_pow2 = false;
2191 step_val = 0;
2194 /* Some more condition normalization. We must record some assumptions
2195 due to overflows. */
2196 switch (cond)
2198 case LT:
2199 case LTU:
2200 /* We want to take care only of non-sharp relationals; this is easy,
2201 as in cases the overflow would make the transformation unsafe
2202 the loop does not roll. Seemingly it would make more sense to want
2203 to take care of sharp relationals instead, as NE is more similar to
2204 them, but the problem is that here the transformation would be more
2205 difficult due to possibly infinite loops. */
2206 if (iv0.step == const0_rtx)
2208 tmp = lowpart_subreg (mode, iv0.base, comp_mode);
2209 assumption = simplify_gen_relational (EQ, SImode, mode, tmp,
2210 mode_mmax);
2211 if (assumption == const_true_rtx)
2212 goto zero_iter_simplify;
2213 iv0.base = simplify_gen_binary (PLUS, comp_mode,
2214 iv0.base, const1_rtx);
2216 else
2218 tmp = lowpart_subreg (mode, iv1.base, comp_mode);
2219 assumption = simplify_gen_relational (EQ, SImode, mode, tmp,
2220 mode_mmin);
2221 if (assumption == const_true_rtx)
2222 goto zero_iter_simplify;
2223 iv1.base = simplify_gen_binary (PLUS, comp_mode,
2224 iv1.base, constm1_rtx);
2227 if (assumption != const0_rtx)
2228 desc->noloop_assumptions =
2229 alloc_EXPR_LIST (0, assumption, desc->noloop_assumptions);
2230 cond = (cond == LT) ? LE : LEU;
2232 /* It will be useful to be able to tell the difference once more in
2233 LE -> NE reduction. */
2234 was_sharp = true;
2235 break;
2236 default: ;
2239 /* Take care of trivially infinite loops. */
2240 if (cond != NE)
2242 if (iv0.step == const0_rtx)
2244 tmp = lowpart_subreg (mode, iv0.base, comp_mode);
2245 if (rtx_equal_p (tmp, mode_mmin))
2247 desc->infinite =
2248 alloc_EXPR_LIST (0, const_true_rtx, NULL_RTX);
2249 /* Fill in the remaining fields somehow. */
2250 goto zero_iter_simplify;
2253 else
2255 tmp = lowpart_subreg (mode, iv1.base, comp_mode);
2256 if (rtx_equal_p (tmp, mode_mmax))
2258 desc->infinite =
2259 alloc_EXPR_LIST (0, const_true_rtx, NULL_RTX);
2260 /* Fill in the remaining fields somehow. */
2261 goto zero_iter_simplify;
2266 /* If we can we want to take care of NE conditions instead of size
2267 comparisons, as they are much more friendly (most importantly
2268 this takes care of special handling of loops with step 1). We can
2269 do it if we first check that upper bound is greater or equal to
2270 lower bound, their difference is constant c modulo step and that
2271 there is not an overflow. */
2272 if (cond != NE)
2274 if (iv0.step == const0_rtx)
2275 step = simplify_gen_unary (NEG, comp_mode, iv1.step, comp_mode);
2276 else
2277 step = iv0.step;
2278 delta = simplify_gen_binary (MINUS, comp_mode, iv1.base, iv0.base);
2279 delta = lowpart_subreg (mode, delta, comp_mode);
2280 delta = simplify_gen_binary (UMOD, mode, delta, step);
2281 may_xform = const0_rtx;
2282 may_not_xform = const_true_rtx;
2284 if (GET_CODE (delta) == CONST_INT)
2286 if (was_sharp && INTVAL (delta) == INTVAL (step) - 1)
2288 /* A special case. We have transformed condition of type
2289 for (i = 0; i < 4; i += 4)
2290 into
2291 for (i = 0; i <= 3; i += 4)
2292 obviously if the test for overflow during that transformation
2293 passed, we cannot overflow here. Most importantly any
2294 loop with sharp end condition and step 1 falls into this
2295 category, so handling this case specially is definitely
2296 worth the troubles. */
2297 may_xform = const_true_rtx;
2299 else if (iv0.step == const0_rtx)
2301 bound = simplify_gen_binary (PLUS, comp_mode, mmin, step);
2302 bound = simplify_gen_binary (MINUS, comp_mode, bound, delta);
2303 bound = lowpart_subreg (mode, bound, comp_mode);
2304 tmp = lowpart_subreg (mode, iv0.base, comp_mode);
2305 may_xform = simplify_gen_relational (cond, SImode, mode,
2306 bound, tmp);
2307 may_not_xform = simplify_gen_relational (reverse_condition (cond),
2308 SImode, mode,
2309 bound, tmp);
2311 else
2313 bound = simplify_gen_binary (MINUS, comp_mode, mmax, step);
2314 bound = simplify_gen_binary (PLUS, comp_mode, bound, delta);
2315 bound = lowpart_subreg (mode, bound, comp_mode);
2316 tmp = lowpart_subreg (mode, iv1.base, comp_mode);
2317 may_xform = simplify_gen_relational (cond, SImode, mode,
2318 tmp, bound);
2319 may_not_xform = simplify_gen_relational (reverse_condition (cond),
2320 SImode, mode,
2321 tmp, bound);
2325 if (may_xform != const0_rtx)
2327 /* We perform the transformation always provided that it is not
2328 completely senseless. This is OK, as we would need this assumption
2329 to determine the number of iterations anyway. */
2330 if (may_xform != const_true_rtx)
2332 /* If the step is a power of two and the final value we have
2333 computed overflows, the cycle is infinite. Otherwise it
2334 is nontrivial to compute the number of iterations. */
2335 if (step_is_pow2)
2336 desc->infinite = alloc_EXPR_LIST (0, may_not_xform,
2337 desc->infinite);
2338 else
2339 desc->assumptions = alloc_EXPR_LIST (0, may_xform,
2340 desc->assumptions);
2343 /* We are going to lose some information about upper bound on
2344 number of iterations in this step, so record the information
2345 here. */
2346 inc = INTVAL (iv0.step) - INTVAL (iv1.step);
2347 if (GET_CODE (iv1.base) == CONST_INT)
2348 up = INTVAL (iv1.base);
2349 else
2350 up = INTVAL (mode_mmax) - inc;
2351 down = INTVAL (GET_CODE (iv0.base) == CONST_INT
2352 ? iv0.base
2353 : mode_mmin);
2354 desc->niter_max = (up - down) / inc + 1;
2356 if (iv0.step == const0_rtx)
2358 iv0.base = simplify_gen_binary (PLUS, comp_mode, iv0.base, delta);
2359 iv0.base = simplify_gen_binary (MINUS, comp_mode, iv0.base, step);
2361 else
2363 iv1.base = simplify_gen_binary (MINUS, comp_mode, iv1.base, delta);
2364 iv1.base = simplify_gen_binary (PLUS, comp_mode, iv1.base, step);
2367 tmp0 = lowpart_subreg (mode, iv0.base, comp_mode);
2368 tmp1 = lowpart_subreg (mode, iv1.base, comp_mode);
2369 assumption = simplify_gen_relational (reverse_condition (cond),
2370 SImode, mode, tmp0, tmp1);
2371 if (assumption == const_true_rtx)
2372 goto zero_iter_simplify;
2373 else if (assumption != const0_rtx)
2374 desc->noloop_assumptions =
2375 alloc_EXPR_LIST (0, assumption, desc->noloop_assumptions);
2376 cond = NE;
2380 /* Count the number of iterations. */
2381 if (cond == NE)
2383 /* Everything we do here is just arithmetics modulo size of mode. This
2384 makes us able to do more involved computations of number of iterations
2385 than in other cases. First transform the condition into shape
2386 s * i <> c, with s positive. */
2387 iv1.base = simplify_gen_binary (MINUS, comp_mode, iv1.base, iv0.base);
2388 iv0.base = const0_rtx;
2389 iv0.step = simplify_gen_binary (MINUS, comp_mode, iv0.step, iv1.step);
2390 iv1.step = const0_rtx;
2391 if (INTVAL (iv0.step) < 0)
2393 iv0.step = simplify_gen_unary (NEG, comp_mode, iv0.step, mode);
2394 iv1.base = simplify_gen_unary (NEG, comp_mode, iv1.base, mode);
2396 iv0.step = lowpart_subreg (mode, iv0.step, comp_mode);
2398 /* Let nsd (s, size of mode) = d. If d does not divide c, the loop
2399 is infinite. Otherwise, the number of iterations is
2400 (inverse(s/d) * (c/d)) mod (size of mode/d). */
2401 s = INTVAL (iv0.step); d = 1;
2402 while (s % 2 != 1)
2404 s /= 2;
2405 d *= 2;
2406 size--;
2408 bound = GEN_INT (((unsigned HOST_WIDEST_INT) 1 << (size - 1 ) << 1) - 1);
2410 tmp1 = lowpart_subreg (mode, iv1.base, comp_mode);
2411 tmp = simplify_gen_binary (UMOD, mode, tmp1, GEN_INT (d));
2412 assumption = simplify_gen_relational (NE, SImode, mode, tmp, const0_rtx);
2413 desc->infinite = alloc_EXPR_LIST (0, assumption, desc->infinite);
2415 tmp = simplify_gen_binary (UDIV, mode, tmp1, GEN_INT (d));
2416 inv = inverse (s, size);
2417 tmp = simplify_gen_binary (MULT, mode, tmp, gen_int_mode (inv, mode));
2418 desc->niter_expr = simplify_gen_binary (AND, mode, tmp, bound);
2420 else
2422 if (iv1.step == const0_rtx)
2423 /* Condition in shape a + s * i <= b
2424 We must know that b + s does not overflow and a <= b + s and then we
2425 can compute number of iterations as (b + s - a) / s. (It might
2426 seem that we in fact could be more clever about testing the b + s
2427 overflow condition using some information about b - a mod s,
2428 but it was already taken into account during LE -> NE transform). */
2430 step = iv0.step;
2431 tmp0 = lowpart_subreg (mode, iv0.base, comp_mode);
2432 tmp1 = lowpart_subreg (mode, iv1.base, comp_mode);
2434 bound = simplify_gen_binary (MINUS, mode, mode_mmax,
2435 lowpart_subreg (mode, step,
2436 comp_mode));
2437 if (step_is_pow2)
2439 rtx t0, t1;
2441 /* If s is power of 2, we know that the loop is infinite if
2442 a % s <= b % s and b + s overflows. */
2443 assumption = simplify_gen_relational (reverse_condition (cond),
2444 SImode, mode,
2445 tmp1, bound);
2447 t0 = simplify_gen_binary (UMOD, mode, copy_rtx (tmp0), step);
2448 t1 = simplify_gen_binary (UMOD, mode, copy_rtx (tmp1), step);
2449 tmp = simplify_gen_relational (cond, SImode, mode, t0, t1);
2450 assumption = simplify_gen_binary (AND, SImode, assumption, tmp);
2451 desc->infinite =
2452 alloc_EXPR_LIST (0, assumption, desc->infinite);
2454 else
2456 assumption = simplify_gen_relational (cond, SImode, mode,
2457 tmp1, bound);
2458 desc->assumptions =
2459 alloc_EXPR_LIST (0, assumption, desc->assumptions);
2462 tmp = simplify_gen_binary (PLUS, comp_mode, iv1.base, iv0.step);
2463 tmp = lowpart_subreg (mode, tmp, comp_mode);
2464 assumption = simplify_gen_relational (reverse_condition (cond),
2465 SImode, mode, tmp0, tmp);
2467 delta = simplify_gen_binary (PLUS, mode, tmp1, step);
2468 delta = simplify_gen_binary (MINUS, mode, delta, tmp0);
2470 else
2472 /* Condition in shape a <= b - s * i
2473 We must know that a - s does not overflow and a - s <= b and then
2474 we can again compute number of iterations as (b - (a - s)) / s. */
2475 step = simplify_gen_unary (NEG, mode, iv1.step, mode);
2476 tmp0 = lowpart_subreg (mode, iv0.base, comp_mode);
2477 tmp1 = lowpart_subreg (mode, iv1.base, comp_mode);
2479 bound = simplify_gen_binary (PLUS, mode, mode_mmin,
2480 lowpart_subreg (mode, step, comp_mode));
2481 if (step_is_pow2)
2483 rtx t0, t1;
2485 /* If s is power of 2, we know that the loop is infinite if
2486 a % s <= b % s and a - s overflows. */
2487 assumption = simplify_gen_relational (reverse_condition (cond),
2488 SImode, mode,
2489 bound, tmp0);
2491 t0 = simplify_gen_binary (UMOD, mode, copy_rtx (tmp0), step);
2492 t1 = simplify_gen_binary (UMOD, mode, copy_rtx (tmp1), step);
2493 tmp = simplify_gen_relational (cond, SImode, mode, t0, t1);
2494 assumption = simplify_gen_binary (AND, SImode, assumption, tmp);
2495 desc->infinite =
2496 alloc_EXPR_LIST (0, assumption, desc->infinite);
2498 else
2500 assumption = simplify_gen_relational (cond, SImode, mode,
2501 bound, tmp0);
2502 desc->assumptions =
2503 alloc_EXPR_LIST (0, assumption, desc->assumptions);
2506 tmp = simplify_gen_binary (PLUS, comp_mode, iv0.base, iv1.step);
2507 tmp = lowpart_subreg (mode, tmp, comp_mode);
2508 assumption = simplify_gen_relational (reverse_condition (cond),
2509 SImode, mode,
2510 tmp, tmp1);
2511 delta = simplify_gen_binary (MINUS, mode, tmp0, step);
2512 delta = simplify_gen_binary (MINUS, mode, tmp1, delta);
2514 if (assumption == const_true_rtx)
2515 goto zero_iter_simplify;
2516 else if (assumption != const0_rtx)
2517 desc->noloop_assumptions =
2518 alloc_EXPR_LIST (0, assumption, desc->noloop_assumptions);
2519 delta = simplify_gen_binary (UDIV, mode, delta, step);
2520 desc->niter_expr = delta;
2523 old_niter = desc->niter_expr;
2525 simplify_using_initial_values (loop, AND, &desc->assumptions);
2526 if (desc->assumptions
2527 && XEXP (desc->assumptions, 0) == const0_rtx)
2528 goto fail;
2529 simplify_using_initial_values (loop, IOR, &desc->noloop_assumptions);
2530 simplify_using_initial_values (loop, IOR, &desc->infinite);
2531 simplify_using_initial_values (loop, UNKNOWN, &desc->niter_expr);
2533 /* Rerun the simplification. Consider code (created by copying loop headers)
2535 i = 0;
2537 if (0 < n)
2541 i++;
2542 } while (i < n);
2545 The first pass determines that i = 0, the second pass uses it to eliminate
2546 noloop assumption. */
2548 simplify_using_initial_values (loop, AND, &desc->assumptions);
2549 if (desc->assumptions
2550 && XEXP (desc->assumptions, 0) == const0_rtx)
2551 goto fail;
2552 simplify_using_initial_values (loop, IOR, &desc->noloop_assumptions);
2553 simplify_using_initial_values (loop, IOR, &desc->infinite);
2554 simplify_using_initial_values (loop, UNKNOWN, &desc->niter_expr);
2556 if (desc->noloop_assumptions
2557 && XEXP (desc->noloop_assumptions, 0) == const_true_rtx)
2558 goto zero_iter;
2560 if (GET_CODE (desc->niter_expr) == CONST_INT)
2562 unsigned HOST_WIDEST_INT val = INTVAL (desc->niter_expr);
2564 desc->const_iter = true;
2565 desc->niter_max = desc->niter = val & GET_MODE_MASK (desc->mode);
2567 else
2569 if (!desc->niter_max)
2570 desc->niter_max = determine_max_iter (loop, desc);
2572 /* simplify_using_initial_values does a copy propagation on the registers
2573 in the expression for the number of iterations. This prolongs life
2574 ranges of registers and increases register pressure, and usually
2575 brings no gain (and if it happens to do, the cse pass will take care
2576 of it anyway). So prevent this behavior, unless it enabled us to
2577 derive that the number of iterations is a constant. */
2578 desc->niter_expr = old_niter;
2581 return;
2583 zero_iter_simplify:
2584 /* Simplify the assumptions. */
2585 simplify_using_initial_values (loop, AND, &desc->assumptions);
2586 if (desc->assumptions
2587 && XEXP (desc->assumptions, 0) == const0_rtx)
2588 goto fail;
2589 simplify_using_initial_values (loop, IOR, &desc->infinite);
2591 /* Fallthru. */
2592 zero_iter:
2593 desc->const_iter = true;
2594 desc->niter = 0;
2595 desc->niter_max = 0;
2596 desc->noloop_assumptions = NULL_RTX;
2597 desc->niter_expr = const0_rtx;
2598 return;
2600 fail:
2601 desc->simple_p = false;
2602 return;
2605 /* Checks whether E is a simple exit from LOOP and stores its description
2606 into DESC. */
2608 static void
2609 check_simple_exit (struct loop *loop, edge e, struct niter_desc *desc)
2611 basic_block exit_bb;
2612 rtx condition, at;
2613 edge ein;
2615 exit_bb = e->src;
2616 desc->simple_p = false;
2618 /* It must belong directly to the loop. */
2619 if (exit_bb->loop_father != loop)
2620 return;
2622 /* It must be tested (at least) once during any iteration. */
2623 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit_bb))
2624 return;
2626 /* It must end in a simple conditional jump. */
2627 if (!any_condjump_p (BB_END (exit_bb)))
2628 return;
2630 ein = EDGE_SUCC (exit_bb, 0);
2631 if (ein == e)
2632 ein = EDGE_SUCC (exit_bb, 1);
2634 desc->out_edge = e;
2635 desc->in_edge = ein;
2637 /* Test whether the condition is suitable. */
2638 if (!(condition = get_condition (BB_END (ein->src), &at, false, false)))
2639 return;
2641 if (ein->flags & EDGE_FALLTHRU)
2643 condition = reversed_condition (condition);
2644 if (!condition)
2645 return;
2648 /* Check that we are able to determine number of iterations and fill
2649 in information about it. */
2650 iv_number_of_iterations (loop, at, condition, desc);
2653 /* Finds a simple exit of LOOP and stores its description into DESC. */
2655 void
2656 find_simple_exit (struct loop *loop, struct niter_desc *desc)
2658 unsigned i;
2659 basic_block *body;
2660 edge e;
2661 struct niter_desc act;
2662 bool any = false;
2663 edge_iterator ei;
2665 desc->simple_p = false;
2666 body = get_loop_body (loop);
2668 for (i = 0; i < loop->num_nodes; i++)
2670 FOR_EACH_EDGE (e, ei, body[i]->succs)
2672 if (flow_bb_inside_loop_p (loop, e->dest))
2673 continue;
2675 check_simple_exit (loop, e, &act);
2676 if (!act.simple_p)
2677 continue;
2679 if (!any)
2680 any = true;
2681 else
2683 /* Prefer constant iterations; the less the better. */
2684 if (!act.const_iter
2685 || (desc->const_iter && act.niter >= desc->niter))
2686 continue;
2688 /* Also if the actual exit may be infinite, while the old one
2689 not, prefer the old one. */
2690 if (act.infinite && !desc->infinite)
2691 continue;
2694 *desc = act;
2698 if (dump_file)
2700 if (desc->simple_p)
2702 fprintf (dump_file, "Loop %d is simple:\n", loop->num);
2703 fprintf (dump_file, " simple exit %d -> %d\n",
2704 desc->out_edge->src->index,
2705 desc->out_edge->dest->index);
2706 if (desc->assumptions)
2708 fprintf (dump_file, " assumptions: ");
2709 print_rtl (dump_file, desc->assumptions);
2710 fprintf (dump_file, "\n");
2712 if (desc->noloop_assumptions)
2714 fprintf (dump_file, " does not roll if: ");
2715 print_rtl (dump_file, desc->noloop_assumptions);
2716 fprintf (dump_file, "\n");
2718 if (desc->infinite)
2720 fprintf (dump_file, " infinite if: ");
2721 print_rtl (dump_file, desc->infinite);
2722 fprintf (dump_file, "\n");
2725 fprintf (dump_file, " number of iterations: ");
2726 print_rtl (dump_file, desc->niter_expr);
2727 fprintf (dump_file, "\n");
2729 fprintf (dump_file, " upper bound: ");
2730 fprintf (dump_file, HOST_WIDEST_INT_PRINT_DEC, desc->niter_max);
2731 fprintf (dump_file, "\n");
2733 else
2734 fprintf (dump_file, "Loop %d is not simple.\n", loop->num);
2737 free (body);
2740 /* Creates a simple loop description of LOOP if it was not computed
2741 already. */
2743 struct niter_desc *
2744 get_simple_loop_desc (struct loop *loop)
2746 struct niter_desc *desc = simple_loop_desc (loop);
2748 if (desc)
2749 return desc;
2751 desc = XNEW (struct niter_desc);
2752 iv_analysis_loop_init (loop);
2753 find_simple_exit (loop, desc);
2754 loop->aux = desc;
2756 if (desc->simple_p && (desc->assumptions || desc->infinite))
2758 const char *wording;
2760 /* Assume that no overflow happens and that the loop is finite.
2761 We already warned at the tree level if we ran optimizations there. */
2762 if (!flag_tree_loop_optimize && warn_unsafe_loop_optimizations)
2764 if (desc->infinite)
2766 wording =
2767 flag_unsafe_loop_optimizations
2768 ? N_("assuming that the loop is not infinite")
2769 : N_("cannot optimize possibly infinite loops");
2770 warning (OPT_Wunsafe_loop_optimizations, "%s",
2771 gettext (wording));
2773 if (desc->assumptions)
2775 wording =
2776 flag_unsafe_loop_optimizations
2777 ? N_("assuming that the loop counter does not overflow")
2778 : N_("cannot optimize loop, the loop counter may overflow");
2779 warning (OPT_Wunsafe_loop_optimizations, "%s",
2780 gettext (wording));
2784 if (flag_unsafe_loop_optimizations)
2786 desc->assumptions = NULL_RTX;
2787 desc->infinite = NULL_RTX;
2791 return desc;
2794 /* Releases simple loop description for LOOP. */
2796 void
2797 free_simple_loop_desc (struct loop *loop)
2799 struct niter_desc *desc = simple_loop_desc (loop);
2801 if (!desc)
2802 return;
2804 free (desc);
2805 loop->aux = NULL;