1 /* Rtl-level induction variable analysis.
2 Copyright (C) 2004, 2005 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published by the
8 Free Software Foundation; either version 2, or (at your option) any
9 later version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT
12 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to the Free
18 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
19 02110-1301, USA. */
21 /* This is a simple analysis of induction variables of the loop. The major use
22 is for determining the number of iterations of a loop for loop unrolling,
23 doloop optimization and branch prediction. The iv information is computed
24 on demand.
26 Induction variables are analyzed by walking the use-def chains. When a biv
27 is found, it is cached in the bivs hash table. When a register is proved
28 to be a giv, its description is stored in DF_REF_DATA of the def reference.
30 The analysis always works with one loop -- you must call
31 iv_analysis_loop_init (loop) for it. All the other functions then work with
32 this loop. When you need to work with another loop, just call
33 iv_analysis_loop_init for it. When you no longer need iv analysis, call
34 iv_analysis_done () to clean up the memory.
36 The available functions are:
38 iv_analyze (insn, reg, iv): Stores the description of the induction variable
39 corresponding to the use of register REG in INSN to IV. Returns true if
40 REG is an induction variable in INSN, false otherwise.
41 If the use of REG is not found in INSN, the following insns are scanned (so
42 that we may call this function on the insn returned by get_condition).
43 iv_analyze_result (insn, def, iv): Stores to IV the description of the iv
44 corresponding to DEF, which is a register defined in INSN.
45 iv_analyze_expr (insn, rhs, mode, iv): Stores to IV the description of the iv
46 corresponding to expression RHS evaluated at INSN. All registers used by
47 RHS must also be used in INSN.
48 iv_current_loop_df (): Returns the dataflow object for the current loop used
49 by iv analysis. */
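/* A typical use of this interface, as a rough sketch only (LOOP, INSN and
   REG are placeholders for a struct loop *, an insn inside that loop and a
   register used in INSN, obtained elsewhere):

     struct rtx_iv iv;

     iv_analysis_loop_init (LOOP);
     if (iv_analyze (INSN, REG, &iv))
       {
         ... iv.base, iv.step and iv.mode now describe REG at INSN ...
       }
     iv_analysis_done ();  */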
51 #include "config.h"
52 #include "system.h"
53 #include "coretypes.h"
54 #include "tm.h"
55 #include "rtl.h"
56 #include "hard-reg-set.h"
57 #include "obstack.h"
58 #include "basic-block.h"
59 #include "cfgloop.h"
60 #include "expr.h"
61 #include "intl.h"
62 #include "output.h"
63 #include "toplev.h"
64 #include "df.h"
65 #include "hashtab.h"
67 /* Possible return values of iv_get_reaching_def. */
69 enum iv_grd_result
71 /* More than one reaching def, or reaching def that does not
72 dominate the use. */
73 GRD_INVALID,
75 /* The use is a trivial invariant of the loop, i.e. it is not changed
76 inside the loop. */
77 GRD_INVARIANT,
79 /* The use is reached by the initial value and a value from the
80 previous iteration. */
81 GRD_MAYBE_BIV,
83 /* The use has a single dominating def. */
84 GRD_SINGLE_DOM
87 /* Information about a biv. */
89 struct biv_entry
91 unsigned regno; /* The register of the biv. */
92 struct rtx_iv iv; /* Value of the biv. */
95 /* Induction variable stored at the reference. */
96 #define DF_REF_IV(REF) ((struct rtx_iv *) DF_REF_DATA (REF))
97 #define DF_REF_IV_SET(REF, IV) DF_REF_DATA (REF) = (IV)
99 /* The current loop. */
101 static struct loop *current_loop;
103 /* Dataflow information for the current loop. */
105 static struct df *df = NULL;
107 /* Bivs of the current loop. */
109 static htab_t bivs;
111 /* Return the dataflow object for the current loop. */
112 struct df *
113 iv_current_loop_df (void)
115 return df;
118 static bool iv_analyze_op (rtx, rtx, struct rtx_iv *);
120 /* Dumps information about IV to FILE. */
122 extern void dump_iv_info (FILE *, struct rtx_iv *);
123 void
124 dump_iv_info (FILE *file, struct rtx_iv *iv)
126 if (!iv->base)
128 fprintf (file, "not simple");
129 return;
132 if (iv->step == const0_rtx
133 && !iv->first_special)
134 fprintf (file, "invariant ");
136 print_rtl (file, iv->base);
137 if (iv->step != const0_rtx)
139 fprintf (file, " + ");
140 print_rtl (file, iv->step);
141 fprintf (file, " * iteration");
143 fprintf (file, " (in %s)", GET_MODE_NAME (iv->mode));
145 if (iv->mode != iv->extend_mode)
146 fprintf (file, " %s to %s",
147 rtx_name[iv->extend],
148 GET_MODE_NAME (iv->extend_mode));
150 if (iv->mult != const1_rtx)
152 fprintf (file, " * ");
153 print_rtl (file, iv->mult);
155 if (iv->delta != const0_rtx)
157 fprintf (file, " + ");
158 print_rtl (file, iv->delta);
160 if (iv->first_special)
161 fprintf (file, " (first special)");
164 /* Generates a subreg to get the least significant part of EXPR (in mode
165 INNER_MODE) to OUTER_MODE. */
167 rtx
168 lowpart_subreg (enum machine_mode outer_mode, rtx expr,
169 enum machine_mode inner_mode)
171 return simplify_gen_subreg (outer_mode, expr, inner_mode,
172 subreg_lowpart_offset (outer_mode, inner_mode));
175 /* Checks whether REG is a well-behaved register. */
177 static bool
178 simple_reg_p (rtx reg)
180 unsigned r;
182 if (GET_CODE (reg) == SUBREG)
184 if (!subreg_lowpart_p (reg))
185 return false;
186 reg = SUBREG_REG (reg);
189 if (!REG_P (reg))
190 return false;
192 r = REGNO (reg);
193 if (HARD_REGISTER_NUM_P (r))
194 return false;
196 if (GET_MODE_CLASS (GET_MODE (reg)) != MODE_INT)
197 return false;
199 return true;
202 /* Clears the information about ivs stored in df. */
204 static void
205 clear_iv_info (void)
207 unsigned i, n_defs = DF_DEFS_SIZE (df);
208 struct rtx_iv *iv;
209 struct df_ref *def;
211 for (i = 0; i < n_defs; i++)
213 def = DF_DEFS_GET (df, i);
214 iv = DF_REF_IV (def);
215 if (!iv)
216 continue;
217 free (iv);
218 DF_REF_IV_SET (def, NULL);
221 htab_empty (bivs);
224 /* Returns hash value for biv B. */
226 static hashval_t
227 biv_hash (const void *b)
229 return ((const struct biv_entry *) b)->regno;
232 /* Compares biv B and register R. */
234 static int
235 biv_eq (const void *b, const void *r)
237 return ((const struct biv_entry *) b)->regno == REGNO ((rtx) r);
240 /* Prepare the data for an induction variable analysis of a LOOP. */
242 void
243 iv_analysis_loop_init (struct loop *loop)
245 basic_block *body = get_loop_body_in_dom_order (loop), bb;
246 bitmap blocks = BITMAP_ALLOC (NULL);
247 unsigned i;
248 bool first_time = (df == NULL);
250 current_loop = loop;
252 /* Clear the information from the analysis of the previous loop. */
253 if (first_time)
255 df = df_init (DF_HARD_REGS | DF_EQUIV_NOTES);
256 df_chain_add_problem (df, DF_UD_CHAIN);
257 bivs = htab_create (10, biv_hash, biv_eq, free);
259 else
260 clear_iv_info ();
262 for (i = 0; i < loop->num_nodes; i++)
264 bb = body[i];
265 bitmap_set_bit (blocks, bb->index);
267 df_set_blocks (df, blocks);
268 df_analyze (df);
269 BITMAP_FREE (blocks);
270 free (body);
273 /* Finds the definition of REG that dominates loop latch and stores
274 it to DEF. Returns false if there is not a single definition
275 dominating the latch. If REG has no definition in loop, DEF
276 is set to NULL and true is returned. */
278 static bool
279 latch_dominating_def (rtx reg, struct df_ref **def)
281 struct df_ref *single_rd = NULL, *adef;
282 unsigned regno = REGNO (reg);
283 struct df_reg_info *reg_info = DF_REG_DEF_GET (df, regno);
284 struct df_rd_bb_info *bb_info = DF_RD_BB_INFO (df, current_loop->latch);
286 for (adef = reg_info->reg_chain; adef; adef = adef->next_reg)
288 if (!bitmap_bit_p (bb_info->out, DF_REF_ID (adef)))
289 continue;
291 /* More than one reaching definition. */
292 if (single_rd)
293 return false;
295 if (!just_once_each_iteration_p (current_loop, DF_REF_BB (adef)))
296 return false;
298 single_rd = adef;
301 *def = single_rd;
302 return true;
305 /* Gets definition of REG reaching its use in INSN and stores it to DEF. */
307 static enum iv_grd_result
308 iv_get_reaching_def (rtx insn, rtx reg, struct df_ref **def)
310 struct df_ref *use, *adef;
311 basic_block def_bb, use_bb;
312 rtx def_insn;
313 bool dom_p;
315 *def = NULL;
316 if (!simple_reg_p (reg))
317 return GRD_INVALID;
318 if (GET_CODE (reg) == SUBREG)
319 reg = SUBREG_REG (reg);
320 gcc_assert (REG_P (reg));
322 use = df_find_use (df, insn, reg);
323 gcc_assert (use != NULL);
325 if (!DF_REF_CHAIN (use))
326 return GRD_INVARIANT;
328 /* More than one reaching def. */
329 if (DF_REF_CHAIN (use)->next)
330 return GRD_INVALID;
332 adef = DF_REF_CHAIN (use)->ref;
333 def_insn = DF_REF_INSN (adef);
334 def_bb = DF_REF_BB (adef);
335 use_bb = BLOCK_FOR_INSN (insn);
337 if (use_bb == def_bb)
338 dom_p = (DF_INSN_LUID (df, def_insn) < DF_INSN_LUID (df, insn));
339 else
340 dom_p = dominated_by_p (CDI_DOMINATORS, use_bb, def_bb);
342 if (dom_p)
344 *def = adef;
345 return GRD_SINGLE_DOM;
348 /* The definition does not dominate the use. This is still OK if
349 this may be a use of a biv, i.e. if the def_bb dominates loop
350 latch. */
351 if (just_once_each_iteration_p (current_loop, def_bb))
352 return GRD_MAYBE_BIV;
354 return GRD_INVALID;
357 /* Sets IV to invariant CST in MODE. Always returns true (just for
358 consistency with other iv manipulation functions that may fail). */
360 static bool
361 iv_constant (struct rtx_iv *iv, rtx cst, enum machine_mode mode)
363 if (mode == VOIDmode)
364 mode = GET_MODE (cst);
366 iv->mode = mode;
367 iv->base = cst;
368 iv->step = const0_rtx;
369 iv->first_special = false;
370 iv->extend = UNKNOWN;
371 iv->extend_mode = iv->mode;
372 iv->delta = const0_rtx;
373 iv->mult = const1_rtx;
375 return true;
378 /* Evaluates application of subreg to MODE on IV. */
380 static bool
381 iv_subreg (struct rtx_iv *iv, enum machine_mode mode)
383 /* If iv is invariant, just calculate the new value. */
384 if (iv->step == const0_rtx
385 && !iv->first_special)
387 rtx val = get_iv_value (iv, const0_rtx);
388 val = lowpart_subreg (mode, val, iv->extend_mode);
390 iv->base = val;
391 iv->extend = UNKNOWN;
392 iv->mode = iv->extend_mode = mode;
393 iv->delta = const0_rtx;
394 iv->mult = const1_rtx;
395 return true;
398 if (iv->extend_mode == mode)
399 return true;
401 if (GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (iv->mode))
402 return false;
404 iv->extend = UNKNOWN;
405 iv->mode = mode;
407 iv->base = simplify_gen_binary (PLUS, iv->extend_mode, iv->delta,
408 simplify_gen_binary (MULT, iv->extend_mode,
409 iv->base, iv->mult));
410 iv->step = simplify_gen_binary (MULT, iv->extend_mode, iv->step, iv->mult);
411 iv->mult = const1_rtx;
412 iv->delta = const0_rtx;
413 iv->first_special = false;
415 return true;
418 /* Evaluates application of EXTEND to MODE on IV. */
420 static bool
421 iv_extend (struct rtx_iv *iv, enum rtx_code extend, enum machine_mode mode)
423 /* If iv is invariant, just calculate the new value. */
424 if (iv->step == const0_rtx
425 && !iv->first_special)
427 rtx val = get_iv_value (iv, const0_rtx);
428 val = simplify_gen_unary (extend, mode, val, iv->extend_mode);
430 iv->base = val;
431 iv->extend = UNKNOWN;
432 iv->mode = iv->extend_mode = mode;
433 iv->delta = const0_rtx;
434 iv->mult = const1_rtx;
435 return true;
438 if (mode != iv->extend_mode)
439 return false;
441 if (iv->extend != UNKNOWN
442 && iv->extend != extend)
443 return false;
445 iv->extend = extend;
447 return true;
450 /* Evaluates negation of IV. */
452 static bool
453 iv_neg (struct rtx_iv *iv)
455 if (iv->extend == UNKNOWN)
457 iv->base = simplify_gen_unary (NEG, iv->extend_mode,
458 iv->base, iv->extend_mode);
459 iv->step = simplify_gen_unary (NEG, iv->extend_mode,
460 iv->step, iv->extend_mode);
462 else
464 iv->delta = simplify_gen_unary (NEG, iv->extend_mode,
465 iv->delta, iv->extend_mode);
466 iv->mult = simplify_gen_unary (NEG, iv->extend_mode,
467 iv->mult, iv->extend_mode);
470 return true;
473 /* Evaluates addition or subtraction (according to OP) of IV1 to IV0. */
475 static bool
476 iv_add (struct rtx_iv *iv0, struct rtx_iv *iv1, enum rtx_code op)
478 enum machine_mode mode;
479 rtx arg;
481 /* Extend the constant to extend_mode of the other operand if necessary. */
482 if (iv0->extend == UNKNOWN
483 && iv0->mode == iv0->extend_mode
484 && iv0->step == const0_rtx
485 && GET_MODE_SIZE (iv0->extend_mode) < GET_MODE_SIZE (iv1->extend_mode))
487 iv0->extend_mode = iv1->extend_mode;
488 iv0->base = simplify_gen_unary (ZERO_EXTEND, iv0->extend_mode,
489 iv0->base, iv0->mode);
491 if (iv1->extend == UNKNOWN
492 && iv1->mode == iv1->extend_mode
493 && iv1->step == const0_rtx
494 && GET_MODE_SIZE (iv1->extend_mode) < GET_MODE_SIZE (iv0->extend_mode))
496 iv1->extend_mode = iv0->extend_mode;
497 iv1->base = simplify_gen_unary (ZERO_EXTEND, iv1->extend_mode,
498 iv1->base, iv1->mode);
501 mode = iv0->extend_mode;
502 if (mode != iv1->extend_mode)
503 return false;
505 if (iv0->extend == UNKNOWN && iv1->extend == UNKNOWN)
507 if (iv0->mode != iv1->mode)
508 return false;
510 iv0->base = simplify_gen_binary (op, mode, iv0->base, iv1->base);
511 iv0->step = simplify_gen_binary (op, mode, iv0->step, iv1->step);
513 return true;
516 /* Handle addition of constant. */
517 if (iv1->extend == UNKNOWN
518 && iv1->mode == mode
519 && iv1->step == const0_rtx)
521 iv0->delta = simplify_gen_binary (op, mode, iv0->delta, iv1->base);
522 return true;
525 if (iv0->extend == UNKNOWN
526 && iv0->mode == mode
527 && iv0->step == const0_rtx)
529 arg = iv0->base;
530 *iv0 = *iv1;
531 if (op == MINUS
532 && !iv_neg (iv0))
533 return false;
535 iv0->delta = simplify_gen_binary (PLUS, mode, iv0->delta, arg);
536 return true;
539 return false;
542 /* Evaluates multiplication of IV by constant CST. */
544 static bool
545 iv_mult (struct rtx_iv *iv, rtx mby)
547 enum machine_mode mode = iv->extend_mode;
549 if (GET_MODE (mby) != VOIDmode
550 && GET_MODE (mby) != mode)
551 return false;
553 if (iv->extend == UNKNOWN)
555 iv->base = simplify_gen_binary (MULT, mode, iv->base, mby);
556 iv->step = simplify_gen_binary (MULT, mode, iv->step, mby);
558 else
560 iv->delta = simplify_gen_binary (MULT, mode, iv->delta, mby);
561 iv->mult = simplify_gen_binary (MULT, mode, iv->mult, mby);
564 return true;
567 /* Evaluates shift of IV by constant CST. */
569 static bool
570 iv_shift (struct rtx_iv *iv, rtx mby)
572 enum machine_mode mode = iv->extend_mode;
574 if (GET_MODE (mby) != VOIDmode
575 && GET_MODE (mby) != mode)
576 return false;
578 if (iv->extend == UNKNOWN)
580 iv->base = simplify_gen_binary (ASHIFT, mode, iv->base, mby);
581 iv->step = simplify_gen_binary (ASHIFT, mode, iv->step, mby);
583 else
585 iv->delta = simplify_gen_binary (ASHIFT, mode, iv->delta, mby);
586 iv->mult = simplify_gen_binary (ASHIFT, mode, iv->mult, mby);
589 return true;
592 /* The recursive part of get_biv_step. Gets the value of the single value
593 defined by DEF with respect to the initial value of REG inside the loop,
594 in the shape described at get_biv_step. */
596 static bool
597 get_biv_step_1 (struct df_ref *def, rtx reg,
598 rtx *inner_step, enum machine_mode *inner_mode,
599 enum rtx_code *extend, enum machine_mode outer_mode,
600 rtx *outer_step)
602 rtx set, rhs, op0 = NULL_RTX, op1 = NULL_RTX;
603 rtx next, nextr, tmp;
604 enum rtx_code code;
605 rtx insn = DF_REF_INSN (def);
606 struct df_ref *next_def;
607 enum iv_grd_result res;
609 set = single_set (insn);
610 if (!set)
611 return false;
613 rhs = find_reg_equal_equiv_note (insn);
614 if (rhs)
615 rhs = XEXP (rhs, 0);
616 else
617 rhs = SET_SRC (set);
619 code = GET_CODE (rhs);
620 switch (code)
622 case SUBREG:
623 case REG:
624 next = rhs;
625 break;
627 case PLUS:
628 case MINUS:
629 op0 = XEXP (rhs, 0);
630 op1 = XEXP (rhs, 1);
632 if (code == PLUS && CONSTANT_P (op0))
634 tmp = op0; op0 = op1; op1 = tmp;
637 if (!simple_reg_p (op0)
638 || !CONSTANT_P (op1))
639 return false;
641 if (GET_MODE (rhs) != outer_mode)
643 /* ppc64 uses expressions like
645 (set x:SI (plus:SI (subreg:SI y:DI) 1)).
647 This is equivalent to
649 (set x':DI (plus:DI y:DI 1))
650 (set x:SI (subreg:SI (x':DI))). */
651 if (GET_CODE (op0) != SUBREG)
652 return false;
653 if (GET_MODE (SUBREG_REG (op0)) != outer_mode)
654 return false;
657 next = op0;
658 break;
660 case SIGN_EXTEND:
661 case ZERO_EXTEND:
662 if (GET_MODE (rhs) != outer_mode)
663 return false;
665 op0 = XEXP (rhs, 0);
666 if (!simple_reg_p (op0))
667 return false;
669 next = op0;
670 break;
672 default:
673 return false;
676 if (GET_CODE (next) == SUBREG)
678 if (!subreg_lowpart_p (next))
679 return false;
681 nextr = SUBREG_REG (next);
682 if (GET_MODE (nextr) != outer_mode)
683 return false;
685 else
686 nextr = next;
688 res = iv_get_reaching_def (insn, nextr, &next_def);
690 if (res == GRD_INVALID || res == GRD_INVARIANT)
691 return false;
693 if (res == GRD_MAYBE_BIV)
695 if (!rtx_equal_p (nextr, reg))
696 return false;
698 *inner_step = const0_rtx;
699 *extend = UNKNOWN;
700 *inner_mode = outer_mode;
701 *outer_step = const0_rtx;
703 else if (!get_biv_step_1 (next_def, reg,
704 inner_step, inner_mode, extend, outer_mode,
705 outer_step))
706 return false;
708 if (GET_CODE (next) == SUBREG)
710 enum machine_mode amode = GET_MODE (next);
712 if (GET_MODE_SIZE (amode) > GET_MODE_SIZE (*inner_mode))
713 return false;
715 *inner_mode = amode;
716 *inner_step = simplify_gen_binary (PLUS, outer_mode,
717 *inner_step, *outer_step);
718 *outer_step = const0_rtx;
719 *extend = UNKNOWN;
722 switch (code)
724 case REG:
725 case SUBREG:
726 break;
728 case PLUS:
729 case MINUS:
730 if (*inner_mode == outer_mode
731 /* See comment in previous switch. */
732 || GET_MODE (rhs) != outer_mode)
733 *inner_step = simplify_gen_binary (code, outer_mode,
734 *inner_step, op1);
735 else
736 *outer_step = simplify_gen_binary (code, outer_mode,
737 *outer_step, op1);
738 break;
740 case SIGN_EXTEND:
741 case ZERO_EXTEND:
742 gcc_assert (GET_MODE (op0) == *inner_mode
743 && *extend == UNKNOWN
744 && *outer_step == const0_rtx);
746 *extend = code;
747 break;
749 default:
750 return false;
753 return true;
756 /* Gets the operation on register REG inside the loop, in the shape
758 OUTER_STEP + EXTEND_{OUTER_MODE} (SUBREG_{INNER_MODE} (REG + INNER_STEP))
760 If the operation cannot be described in this shape, return false.
761 LAST_DEF is the definition of REG that dominates the loop latch. */
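/* For illustration (a hypothetical loop, not tied to any target): if the
   only definition of i:SI that dominates the loop latch is

     (set (reg:SI i) (plus:SI (reg:SI i) (const_int 1)))

   then the operation is described with INNER_STEP = 1, OUTER_STEP = 0,
   EXTEND = UNKNOWN and INNER_MODE = OUTER_MODE = SImode.  */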
763 static bool
764 get_biv_step (struct df_ref *last_def, rtx reg, rtx *inner_step,
765 enum machine_mode *inner_mode, enum rtx_code *extend,
766 enum machine_mode *outer_mode, rtx *outer_step)
768 *outer_mode = GET_MODE (reg);
770 if (!get_biv_step_1 (last_def, reg,
771 inner_step, inner_mode, extend, *outer_mode,
772 outer_step))
773 return false;
775 gcc_assert ((*inner_mode == *outer_mode) != (*extend != UNKNOWN));
776 gcc_assert (*inner_mode != *outer_mode || *outer_step == const0_rtx);
778 return true;
781 /* Records information that DEF is induction variable IV. */
783 static void
784 record_iv (struct df_ref *def, struct rtx_iv *iv)
786 struct rtx_iv *recorded_iv = XNEW (struct rtx_iv);
788 *recorded_iv = *iv;
789 DF_REF_IV_SET (def, recorded_iv);
792 /* If DEF was already analyzed for bivness, store the description of the biv to
793 IV and return true. Otherwise return false. */
795 static bool
796 analyzed_for_bivness_p (rtx def, struct rtx_iv *iv)
798 struct biv_entry *biv = htab_find_with_hash (bivs, def, REGNO (def));
800 if (!biv)
801 return false;
803 *iv = biv->iv;
804 return true;
807 static void
808 record_biv (rtx def, struct rtx_iv *iv)
810 struct biv_entry *biv = XNEW (struct biv_entry);
811 void **slot = htab_find_slot_with_hash (bivs, def, REGNO (def), INSERT);
813 biv->regno = REGNO (def);
814 biv->iv = *iv;
815 gcc_assert (!*slot);
816 *slot = biv;
819 /* Determines whether DEF is a biv and if so, stores its description
820 to *IV. */
822 static bool
823 iv_analyze_biv (rtx def, struct rtx_iv *iv)
825 rtx inner_step, outer_step;
826 enum machine_mode inner_mode, outer_mode;
827 enum rtx_code extend;
828 struct df_ref *last_def;
830 if (dump_file)
832 fprintf (dump_file, "Analyzing ");
833 print_rtl (dump_file, def);
834 fprintf (dump_file, " for bivness.\n");
837 if (!REG_P (def))
839 if (!CONSTANT_P (def))
840 return false;
842 return iv_constant (iv, def, VOIDmode);
845 if (!latch_dominating_def (def, &last_def))
847 if (dump_file)
848 fprintf (dump_file, " not simple.\n");
849 return false;
852 if (!last_def)
853 return iv_constant (iv, def, VOIDmode);
855 if (analyzed_for_bivness_p (def, iv))
857 if (dump_file)
858 fprintf (dump_file, " already analysed.\n");
859 return iv->base != NULL_RTX;
862 if (!get_biv_step (last_def, def, &inner_step, &inner_mode, &extend,
863 &outer_mode, &outer_step))
865 iv->base = NULL_RTX;
866 goto end;
869 /* Loop transforms base to es (base + inner_step) + outer_step,
870 where es means extend of subreg between inner_mode and outer_mode.
871 The corresponding induction variable is
873 es ((base - outer_step) + i * (inner_step + outer_step)) + outer_step */
875 iv->base = simplify_gen_binary (MINUS, outer_mode, def, outer_step);
876 iv->step = simplify_gen_binary (PLUS, outer_mode, inner_step, outer_step);
877 iv->mode = inner_mode;
878 iv->extend_mode = outer_mode;
879 iv->extend = extend;
880 iv->mult = const1_rtx;
881 iv->delta = outer_step;
882 iv->first_special = inner_mode != outer_mode;
884 end:
885 if (dump_file)
887 fprintf (dump_file, " ");
888 dump_iv_info (dump_file, iv);
889 fprintf (dump_file, "\n");
892 record_biv (def, iv);
893 return iv->base != NULL_RTX;
896 /* Analyzes expression RHS used at INSN and stores the result to *IV.
897 The mode of the induction variable is MODE. */
899 bool
900 iv_analyze_expr (rtx insn, rtx rhs, enum machine_mode mode, struct rtx_iv *iv)
902 rtx mby = NULL_RTX, tmp;
903 rtx op0 = NULL_RTX, op1 = NULL_RTX;
904 struct rtx_iv iv0, iv1;
905 enum rtx_code code = GET_CODE (rhs);
906 enum machine_mode omode = mode;
908 iv->mode = VOIDmode;
909 iv->base = NULL_RTX;
910 iv->step = NULL_RTX;
912 gcc_assert (GET_MODE (rhs) == mode || GET_MODE (rhs) == VOIDmode);
914 if (CONSTANT_P (rhs)
915 || REG_P (rhs)
916 || code == SUBREG)
918 if (!iv_analyze_op (insn, rhs, iv))
919 return false;
921 if (iv->mode == VOIDmode)
923 iv->mode = mode;
924 iv->extend_mode = mode;
927 return true;
930 switch (code)
932 case REG:
933 op0 = rhs;
934 break;
936 case SIGN_EXTEND:
937 case ZERO_EXTEND:
938 case NEG:
939 op0 = XEXP (rhs, 0);
940 omode = GET_MODE (op0);
941 break;
943 case PLUS:
944 case MINUS:
945 op0 = XEXP (rhs, 0);
946 op1 = XEXP (rhs, 1);
947 break;
949 case MULT:
950 op0 = XEXP (rhs, 0);
951 mby = XEXP (rhs, 1);
952 if (!CONSTANT_P (mby))
954 tmp = op0;
955 op0 = mby;
956 mby = tmp;
958 if (!CONSTANT_P (mby))
959 return false;
960 break;
962 case ASHIFT:
963 op0 = XEXP (rhs, 0);
964 mby = XEXP (rhs, 1);
965 if (!CONSTANT_P (mby))
966 return false;
967 break;
969 default:
970 return false;
973 if (op0
974 && !iv_analyze_expr (insn, op0, omode, &iv0))
975 return false;
977 if (op1
978 && !iv_analyze_expr (insn, op1, omode, &iv1))
979 return false;
981 switch (code)
983 case SIGN_EXTEND:
984 case ZERO_EXTEND:
985 if (!iv_extend (&iv0, code, mode))
986 return false;
987 break;
989 case NEG:
990 if (!iv_neg (&iv0))
991 return false;
992 break;
994 case PLUS:
995 case MINUS:
996 if (!iv_add (&iv0, &iv1, code))
997 return false;
998 break;
1000 case MULT:
1001 if (!iv_mult (&iv0, mby))
1002 return false;
1003 break;
1005 case ASHIFT:
1006 if (!iv_shift (&iv0, mby))
1007 return false;
1008 break;
1010 default:
1011 break;
1014 *iv = iv0;
1015 return iv->base != NULL_RTX;
1018 /* Analyzes iv DEF and stores the result to *IV. */
1020 static bool
1021 iv_analyze_def (struct df_ref *def, struct rtx_iv *iv)
1023 rtx insn = DF_REF_INSN (def);
1024 rtx reg = DF_REF_REG (def);
1025 rtx set, rhs;
1027 if (dump_file)
1029 fprintf (dump_file, "Analysing def of ");
1030 print_rtl (dump_file, reg);
1031 fprintf (dump_file, " in insn ");
1032 print_rtl_single (dump_file, insn);
1035 if (DF_REF_IV (def))
1037 if (dump_file)
1038 fprintf (dump_file, " already analysed.\n");
1039 *iv = *DF_REF_IV (def);
1040 return iv->base != NULL_RTX;
1043 iv->mode = VOIDmode;
1044 iv->base = NULL_RTX;
1045 iv->step = NULL_RTX;
1047 set = single_set (insn);
1048 if (!set || SET_DEST (set) != reg)
1049 return false;
1051 rhs = find_reg_equal_equiv_note (insn);
1052 if (rhs)
1053 rhs = XEXP (rhs, 0);
1054 else
1055 rhs = SET_SRC (set);
1057 iv_analyze_expr (insn, rhs, GET_MODE (reg), iv);
1058 record_iv (def, iv);
1060 if (dump_file)
1062 print_rtl (dump_file, reg);
1063 fprintf (dump_file, " in insn ");
1064 print_rtl_single (dump_file, insn);
1065 fprintf (dump_file, " is ");
1066 dump_iv_info (dump_file, iv);
1067 fprintf (dump_file, "\n");
1070 return iv->base != NULL_RTX;
1073 /* Analyzes operand OP of INSN and stores the result to *IV. */
1075 static bool
1076 iv_analyze_op (rtx insn, rtx op, struct rtx_iv *iv)
1078 struct df_ref *def = NULL;
1079 enum iv_grd_result res;
1081 if (dump_file)
1083 fprintf (dump_file, "Analysing operand ");
1084 print_rtl (dump_file, op);
1085 fprintf (dump_file, " of insn ");
1086 print_rtl_single (dump_file, insn);
1089 if (CONSTANT_P (op))
1090 res = GRD_INVARIANT;
1091 else if (GET_CODE (op) == SUBREG)
1093 if (!subreg_lowpart_p (op))
1094 return false;
1096 if (!iv_analyze_op (insn, SUBREG_REG (op), iv))
1097 return false;
1099 return iv_subreg (iv, GET_MODE (op));
1101 else
1103 res = iv_get_reaching_def (insn, op, &def);
1104 if (res == GRD_INVALID)
1106 if (dump_file)
1107 fprintf (dump_file, " not simple.\n");
1108 return false;
1112 if (res == GRD_INVARIANT)
1114 iv_constant (iv, op, VOIDmode);
1116 if (dump_file)
1118 fprintf (dump_file, " ");
1119 dump_iv_info (dump_file, iv);
1120 fprintf (dump_file, "\n");
1122 return true;
1125 if (res == GRD_MAYBE_BIV)
1126 return iv_analyze_biv (op, iv);
1128 return iv_analyze_def (def, iv);
1131 /* Analyzes value VAL at INSN and stores the result to *IV. */
1133 bool
1134 iv_analyze (rtx insn, rtx val, struct rtx_iv *iv)
1136 rtx reg;
1138 /* We must find the insn in which val is used, so that we get to the UD
1139 chains. Since the function is sometimes called on the result of
1140 get_condition, this does not necessarily have to be directly INSN; scan
1141 also the following insns. */
1142 if (simple_reg_p (val))
1144 if (GET_CODE (val) == SUBREG)
1145 reg = SUBREG_REG (val);
1146 else
1147 reg = val;
1149 while (!df_find_use (df, insn, reg))
1150 insn = NEXT_INSN (insn);
1153 return iv_analyze_op (insn, val, iv);
1156 /* Analyzes definition of DEF in INSN and stores the result to IV. */
1158 bool
1159 iv_analyze_result (rtx insn, rtx def, struct rtx_iv *iv)
1161 struct df_ref *adef;
1163 adef = df_find_def (df, insn, def);
1164 if (!adef)
1165 return false;
1167 return iv_analyze_def (adef, iv);
1170 /* Checks whether definition of register REG in INSN is a basic induction
1171 variable. IV analysis must have been initialized (via a call to
1172 iv_analysis_loop_init) for this function to produce a result. */
1174 bool
1175 biv_p (rtx insn, rtx reg)
1177 struct rtx_iv iv;
1178 struct df_ref *def, *last_def;
1180 if (!simple_reg_p (reg))
1181 return false;
1183 def = df_find_def (df, insn, reg);
1184 gcc_assert (def != NULL);
1185 if (!latch_dominating_def (reg, &last_def))
1186 return false;
1187 if (last_def != def)
1188 return false;
1190 if (!iv_analyze_biv (reg, &iv))
1191 return false;
1193 return iv.step != const0_rtx;
1196 /* Calculates value of IV at ITERATION-th iteration. */
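/* In outline (a restatement of the code below, not an additional contract):
   the value is computed in IV->extend_mode as BASE + ITERATION * STEP; if
   IV->mode is narrower, the lowpart subreg to IV->mode is taken, and when an
   extension is recorded the result is DELTA + MULT * extend (value).  */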
1198 rtx
1199 get_iv_value (struct rtx_iv *iv, rtx iteration)
1201 rtx val;
1203 /* We would need to generate some if_then_else patterns, and so far
1204 it is not needed anywhere. */
1205 gcc_assert (!iv->first_special);
1207 if (iv->step != const0_rtx && iteration != const0_rtx)
1208 val = simplify_gen_binary (PLUS, iv->extend_mode, iv->base,
1209 simplify_gen_binary (MULT, iv->extend_mode,
1210 iv->step, iteration));
1211 else
1212 val = iv->base;
1214 if (iv->extend_mode == iv->mode)
1215 return val;
1217 val = lowpart_subreg (iv->mode, val, iv->extend_mode);
1219 if (iv->extend == UNKNOWN)
1220 return val;
1222 val = simplify_gen_unary (iv->extend, iv->extend_mode, val, iv->mode);
1223 val = simplify_gen_binary (PLUS, iv->extend_mode, iv->delta,
1224 simplify_gen_binary (MULT, iv->extend_mode,
1225 iv->mult, val));
1227 return val;
1230 /* Free the data for an induction variable analysis. */
1232 void
1233 iv_analysis_done (void)
1235 if (df)
1237 clear_iv_info ();
1238 df_finish (df);
1239 df = NULL;
1240 htab_delete (bivs);
1241 bivs = NULL;
1245 /* Computes inverse to X modulo (1 << MOD). */
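/* A sketch of why the iteration below works (X is odd at the call site,
   since the caller first divides out all factors of two): the loop computes
   RSLT = X^(1 + 2 + ... + 2^(MOD-2)) = X^(2^(MOD-1) - 1), and for odd X
   we have X^(2^(MOD-1)) == 1 (mod 2^MOD), so X * RSLT == 1 (mod 2^MOD).
   For example, inverse (3, 4) == 11, and 3 * 11 == 33 == 1 (mod 16).  */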
1247 static unsigned HOST_WIDEST_INT
1248 inverse (unsigned HOST_WIDEST_INT x, int mod)
1250 unsigned HOST_WIDEST_INT mask =
1251 ((unsigned HOST_WIDEST_INT) 1 << (mod - 1) << 1) - 1;
1252 unsigned HOST_WIDEST_INT rslt = 1;
1253 int i;
1255 for (i = 0; i < mod - 1; i++)
1257 rslt = (rslt * x) & mask;
1258 x = (x * x) & mask;
1261 return rslt;
1264 /* Checks whether register *REG is in set ALT. Callback for for_each_rtx. */
1266 static int
1267 altered_reg_used (rtx *reg, void *alt)
1269 if (!REG_P (*reg))
1270 return 0;
1272 return REGNO_REG_SET_P (alt, REGNO (*reg));
1275 /* Marks registers altered by EXPR in set ALT. */
1277 static void
1278 mark_altered (rtx expr, rtx by ATTRIBUTE_UNUSED, void *alt)
1280 if (GET_CODE (expr) == SUBREG)
1281 expr = SUBREG_REG (expr);
1282 if (!REG_P (expr))
1283 return;
1285 SET_REGNO_REG_SET (alt, REGNO (expr));
1288 /* Checks whether RHS is simple enough to process. */
1290 static bool
1291 simple_rhs_p (rtx rhs)
1293 rtx op0, op1;
1295 if (CONSTANT_P (rhs)
1296 || REG_P (rhs))
1297 return true;
1299 switch (GET_CODE (rhs))
1301 case PLUS:
1302 case MINUS:
1303 op0 = XEXP (rhs, 0);
1304 op1 = XEXP (rhs, 1);
1305 /* Allow reg + const sets only. */
1306 if (REG_P (op0) && CONSTANT_P (op1))
1307 return true;
1308 if (REG_P (op1) && CONSTANT_P (op0))
1309 return true;
1311 return false;
1313 default:
1314 return false;
1318 /* Simplifies *EXPR using assignment in INSN. ALTERED is the set of registers
1319 altered so far. */
1321 static void
1322 simplify_using_assignment (rtx insn, rtx *expr, regset altered)
1324 rtx set = single_set (insn);
1325 rtx lhs = NULL_RTX, rhs;
1326 bool ret = false;
1328 if (set)
1330 lhs = SET_DEST (set);
1331 if (!REG_P (lhs)
1332 || altered_reg_used (&lhs, altered))
1333 ret = true;
1335 else
1336 ret = true;
1338 note_stores (PATTERN (insn), mark_altered, altered);
1339 if (CALL_P (insn))
1341 int i;
1343 /* Kill all call clobbered registers. */
1344 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1345 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
1346 SET_REGNO_REG_SET (altered, i);
1349 if (ret)
1350 return;
1352 rhs = find_reg_equal_equiv_note (insn);
1353 if (rhs)
1354 rhs = XEXP (rhs, 0);
1355 else
1356 rhs = SET_SRC (set);
1358 if (!simple_rhs_p (rhs))
1359 return;
1361 if (for_each_rtx (&rhs, altered_reg_used, altered))
1362 return;
1364 *expr = simplify_replace_rtx (*expr, lhs, rhs);
1367 /* Checks whether A implies B. */
1369 static bool
1370 implies_p (rtx a, rtx b)
1372 rtx op0, op1, opb0, opb1, r;
1373 enum machine_mode mode;
1375 if (GET_CODE (a) == EQ)
1377 op0 = XEXP (a, 0);
1378 op1 = XEXP (a, 1);
1380 if (REG_P (op0))
1382 r = simplify_replace_rtx (b, op0, op1);
1383 if (r == const_true_rtx)
1384 return true;
1387 if (REG_P (op1))
1389 r = simplify_replace_rtx (b, op1, op0);
1390 if (r == const_true_rtx)
1391 return true;
1395 if (b == const_true_rtx)
1396 return true;
1398 if ((GET_RTX_CLASS (GET_CODE (a)) != RTX_COMM_COMPARE
1399 && GET_RTX_CLASS (GET_CODE (a)) != RTX_COMPARE)
1400 || (GET_RTX_CLASS (GET_CODE (b)) != RTX_COMM_COMPARE
1401 && GET_RTX_CLASS (GET_CODE (b)) != RTX_COMPARE))
1402 return false;
1404 op0 = XEXP (a, 0);
1405 op1 = XEXP (a, 1);
1406 opb0 = XEXP (b, 0);
1407 opb1 = XEXP (b, 1);
1409 mode = GET_MODE (op0);
1410 if (mode != GET_MODE (opb0))
1411 mode = VOIDmode;
1412 else if (mode == VOIDmode)
1414 mode = GET_MODE (op1);
1415 if (mode != GET_MODE (opb1))
1416 mode = VOIDmode;
1419 /* A < B implies A + 1 <= B. */
1420 if ((GET_CODE (a) == GT || GET_CODE (a) == LT)
1421 && (GET_CODE (b) == GE || GET_CODE (b) == LE))
1424 if (GET_CODE (a) == GT)
1426 r = op0;
1427 op0 = op1;
1428 op1 = r;
1431 if (GET_CODE (b) == GE)
1433 r = opb0;
1434 opb0 = opb1;
1435 opb1 = r;
1438 if (SCALAR_INT_MODE_P (mode)
1439 && rtx_equal_p (op1, opb1)
1440 && simplify_gen_binary (MINUS, mode, opb0, op0) == const1_rtx)
1441 return true;
1442 return false;
1445 /* A < B or A > B imply A != B. TODO: Likewise
1446 A + n < B implies A != B + n if neither wraps. */
1447 if (GET_CODE (b) == NE
1448 && (GET_CODE (a) == GT || GET_CODE (a) == GTU
1449 || GET_CODE (a) == LT || GET_CODE (a) == LTU))
1451 if (rtx_equal_p (op0, opb0)
1452 && rtx_equal_p (op1, opb1))
1453 return true;
1456 /* For unsigned comparisons, A != 0 implies A > 0 and A >= 1. */
1457 if (GET_CODE (a) == NE
1458 && op1 == const0_rtx)
1460 if ((GET_CODE (b) == GTU
1461 && opb1 == const0_rtx)
1462 || (GET_CODE (b) == GEU
1463 && opb1 == const1_rtx))
1464 return rtx_equal_p (op0, opb0);
1467 /* A != N is equivalent to A - (N + 1) <u -1. */
1468 if (GET_CODE (a) == NE
1469 && GET_CODE (op1) == CONST_INT
1470 && GET_CODE (b) == LTU
1471 && opb1 == constm1_rtx
1472 && GET_CODE (opb0) == PLUS
1473 && GET_CODE (XEXP (opb0, 1)) == CONST_INT
1474 /* Avoid overflows. */
1475 && ((unsigned HOST_WIDE_INT) INTVAL (XEXP (opb0, 1))
1476 != ((unsigned HOST_WIDE_INT)1
1477 << (HOST_BITS_PER_WIDE_INT - 1)) - 1)
1478 && INTVAL (XEXP (opb0, 1)) + 1 == -INTVAL (op1))
1479 return rtx_equal_p (op0, XEXP (opb0, 0));
1481 /* Likewise, A != N implies A - N > 0. */
1482 if (GET_CODE (a) == NE
1483 && GET_CODE (op1) == CONST_INT)
1485 if (GET_CODE (b) == GTU
1486 && GET_CODE (opb0) == PLUS
1487 && opb1 == const0_rtx
1488 && GET_CODE (XEXP (opb0, 1)) == CONST_INT
1489 /* Avoid overflows. */
1490 && ((unsigned HOST_WIDE_INT) INTVAL (XEXP (opb0, 1))
1491 != ((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1)))
1492 && rtx_equal_p (XEXP (opb0, 0), op0))
1493 return INTVAL (op1) == -INTVAL (XEXP (opb0, 1));
1494 if (GET_CODE (b) == GEU
1495 && GET_CODE (opb0) == PLUS
1496 && opb1 == const1_rtx
1497 && GET_CODE (XEXP (opb0, 1)) == CONST_INT
1498 /* Avoid overflows. */
1499 && ((unsigned HOST_WIDE_INT) INTVAL (XEXP (opb0, 1))
1500 != ((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1)))
1501 && rtx_equal_p (XEXP (opb0, 0), op0))
1502 return INTVAL (op1) == -INTVAL (XEXP (opb0, 1));
1505 /* A >s X, where X is positive, implies A <u Y, if Y is negative. */
1506 if ((GET_CODE (a) == GT || GET_CODE (a) == GE)
1507 && GET_CODE (op1) == CONST_INT
1508 && ((GET_CODE (a) == GT && op1 == constm1_rtx)
1509 || INTVAL (op1) >= 0)
1510 && GET_CODE (b) == LTU
1511 && GET_CODE (opb1) == CONST_INT)
1512 return INTVAL (opb1) < 0;
1514 return false;
1517 /* Canonicalizes COND so that
1519 (1) operands are ordered according to
1520 swap_commutative_operands_p, and
1521 (2) (LE x const) is replaced with (LT x <const+1>), and similarly
1522 for GE, GEU, and LEU. */
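/* For instance, (LE x (const_int 4)) becomes (LT x (const_int 5)) and
   (GEU x (const_int 1)) becomes (GTU x (const_int 0)) (illustrative
   examples, assuming the constants are representable in the mode).  */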
1524 static rtx
1525 canon_condition (rtx cond)
1527 rtx tem;
1528 rtx op0, op1;
1529 enum rtx_code code;
1530 enum machine_mode mode;
1532 code = GET_CODE (cond);
1533 op0 = XEXP (cond, 0);
1534 op1 = XEXP (cond, 1);
1536 if (swap_commutative_operands_p (op0, op1))
1538 code = swap_condition (code);
1539 tem = op0;
1540 op0 = op1;
1541 op1 = tem;
1544 mode = GET_MODE (op0);
1545 if (mode == VOIDmode)
1546 mode = GET_MODE (op1);
1547 gcc_assert (mode != VOIDmode);
1549 if (GET_CODE (op1) == CONST_INT
1550 && GET_MODE_CLASS (mode) != MODE_CC
1551 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1553 HOST_WIDE_INT const_val = INTVAL (op1);
1554 unsigned HOST_WIDE_INT uconst_val = const_val;
1555 unsigned HOST_WIDE_INT max_val
1556 = (unsigned HOST_WIDE_INT) GET_MODE_MASK (mode);
1558 switch (code)
1560 case LE:
1561 if ((unsigned HOST_WIDE_INT) const_val != max_val >> 1)
1562 code = LT, op1 = gen_int_mode (const_val + 1, GET_MODE (op0));
1563 break;
1565 /* When cross-compiling, const_val might be sign-extended from
1566 BITS_PER_WORD to HOST_BITS_PER_WIDE_INT */
1567 case GE:
1568 if ((HOST_WIDE_INT) (const_val & max_val)
1569 != (((HOST_WIDE_INT) 1
1570 << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1))))
1571 code = GT, op1 = gen_int_mode (const_val - 1, mode);
1572 break;
1574 case LEU:
1575 if (uconst_val < max_val)
1576 code = LTU, op1 = gen_int_mode (uconst_val + 1, mode);
1577 break;
1579 case GEU:
1580 if (uconst_val != 0)
1581 code = GTU, op1 = gen_int_mode (uconst_val - 1, mode);
1582 break;
1584 default:
1585 break;
1589 if (op0 != XEXP (cond, 0)
1590 || op1 != XEXP (cond, 1)
1591 || code != GET_CODE (cond)
1592 || GET_MODE (cond) != SImode)
1593 cond = gen_rtx_fmt_ee (code, SImode, op0, op1);
1595 return cond;
1598 /* Tries to use the fact that COND holds to simplify EXPR. ALTERED is the
1599 set of altered regs. */
1601 void
1602 simplify_using_condition (rtx cond, rtx *expr, regset altered)
1604 rtx rev, reve, exp = *expr;
1606 if (!COMPARISON_P (exp))
1607 return;
1609 /* If some register gets altered later, we do not really speak about its
1610 value at the time of comparison. */
1611 if (altered
1612 && for_each_rtx (&cond, altered_reg_used, altered))
1613 return;
1615 rev = reversed_condition (cond);
1616 reve = reversed_condition (exp);
1618 cond = canon_condition (cond);
1619 exp = canon_condition (exp);
1620 if (rev)
1621 rev = canon_condition (rev);
1622 if (reve)
1623 reve = canon_condition (reve);
1625 if (rtx_equal_p (exp, cond))
1627 *expr = const_true_rtx;
1628 return;
1632 if (rev && rtx_equal_p (exp, rev))
1634 *expr = const0_rtx;
1635 return;
1638 if (implies_p (cond, exp))
1640 *expr = const_true_rtx;
1641 return;
1644 if (reve && implies_p (cond, reve))
1646 *expr = const0_rtx;
1647 return;
1650 /* A proof by contradiction. If *EXPR implies (not cond), *EXPR must
1651 be false. */
1652 if (rev && implies_p (exp, rev))
1654 *expr = const0_rtx;
1655 return;
1658 /* Similarly, if (not *EXPR) implies (not cond), *EXPR must be true. */
1659 if (rev && reve && implies_p (reve, rev))
1661 *expr = const_true_rtx;
1662 return;
1665 /* We would like to have some other tests here. TODO. */
1667 return;
1670 /* Use relationship between A and *B to eventually eliminate *B.
1671 OP is the operation we consider. */
1673 static void
1674 eliminate_implied_condition (enum rtx_code op, rtx a, rtx *b)
1676 switch (op)
1678 case AND:
1679 /* If A implies *B, we may replace *B by true. */
1680 if (implies_p (a, *b))
1681 *b = const_true_rtx;
1682 break;
1684 case IOR:
1685 /* If *B implies A, we may replace *B by false. */
1686 if (implies_p (*b, a))
1687 *b = const0_rtx;
1688 break;
1690 default:
1691 gcc_unreachable ();
1695 /* Eliminates the conditions in TAIL that are implied by HEAD. OP is the
1696 operation we consider. */
1698 static void
1699 eliminate_implied_conditions (enum rtx_code op, rtx *head, rtx tail)
1701 rtx elt;
1703 for (elt = tail; elt; elt = XEXP (elt, 1))
1704 eliminate_implied_condition (op, *head, &XEXP (elt, 0));
1705 for (elt = tail; elt; elt = XEXP (elt, 1))
1706 eliminate_implied_condition (op, XEXP (elt, 0), head);
1709 /* Simplifies *EXPR using initial values at the start of the LOOP. If *EXPR
1710 is a list, its elements are assumed to be combined using OP. */
1712 static void
1713 simplify_using_initial_values (struct loop *loop, enum rtx_code op, rtx *expr)
1715 rtx head, tail, insn;
1716 rtx neutral, aggr;
1717 regset altered;
1718 edge e;
1720 if (!*expr)
1721 return;
1723 if (CONSTANT_P (*expr))
1724 return;
1726 if (GET_CODE (*expr) == EXPR_LIST)
1728 head = XEXP (*expr, 0);
1729 tail = XEXP (*expr, 1);
1731 eliminate_implied_conditions (op, &head, tail);
1733 switch (op)
1735 case AND:
1736 neutral = const_true_rtx;
1737 aggr = const0_rtx;
1738 break;
1740 case IOR:
1741 neutral = const0_rtx;
1742 aggr = const_true_rtx;
1743 break;
1745 default:
1746 gcc_unreachable ();
1749 simplify_using_initial_values (loop, UNKNOWN, &head);
1750 if (head == aggr)
1752 XEXP (*expr, 0) = aggr;
1753 XEXP (*expr, 1) = NULL_RTX;
1754 return;
1756 else if (head == neutral)
1758 *expr = tail;
1759 simplify_using_initial_values (loop, op, expr);
1760 return;
1762 simplify_using_initial_values (loop, op, &tail);
1764 if (tail && XEXP (tail, 0) == aggr)
1766 *expr = tail;
1767 return;
1770 XEXP (*expr, 0) = head;
1771 XEXP (*expr, 1) = tail;
1772 return;
1775 gcc_assert (op == UNKNOWN);
1777 e = loop_preheader_edge (loop);
1778 if (e->src == ENTRY_BLOCK_PTR)
1779 return;
1781 altered = ALLOC_REG_SET (&reg_obstack);
1783 while (1)
1785 insn = BB_END (e->src);
1786 if (any_condjump_p (insn))
1788 rtx cond = get_condition (BB_END (e->src), NULL, false, true);
1790 if (cond && (e->flags & EDGE_FALLTHRU))
1791 cond = reversed_condition (cond);
1792 if (cond)
1794 simplify_using_condition (cond, expr, altered);
1795 if (CONSTANT_P (*expr))
1797 FREE_REG_SET (altered);
1798 return;
1803 FOR_BB_INSNS_REVERSE (e->src, insn)
1805 if (!INSN_P (insn))
1806 continue;
1808 simplify_using_assignment (insn, expr, altered);
1809 if (CONSTANT_P (*expr))
1811 FREE_REG_SET (altered);
1812 return;
1814 if (for_each_rtx (expr, altered_reg_used, altered))
1816 FREE_REG_SET (altered);
1817 return;
1821 if (!single_pred_p (e->src)
1822 || single_pred (e->src) == ENTRY_BLOCK_PTR)
1823 break;
1824 e = single_pred_edge (e->src);
1827 FREE_REG_SET (altered);
1830 /* Transforms invariant IV into MODE. Adds to DESC assumptions based on the
1831 fact that IV occurs as the left operand of comparison COND and that its
1832 signedness is SIGNED_P. */
1834 static void
1835 shorten_into_mode (struct rtx_iv *iv, enum machine_mode mode,
1836 enum rtx_code cond, bool signed_p, struct niter_desc *desc)
1838 rtx mmin, mmax, cond_over, cond_under;
1840 get_mode_bounds (mode, signed_p, iv->extend_mode, &mmin, &mmax);
1841 cond_under = simplify_gen_relational (LT, SImode, iv->extend_mode,
1842 iv->base, mmin);
1843 cond_over = simplify_gen_relational (GT, SImode, iv->extend_mode,
1844 iv->base, mmax);
1846 switch (cond)
1848 case LE:
1849 case LT:
1850 case LEU:
1851 case LTU:
1852 if (cond_under != const0_rtx)
1853 desc->infinite =
1854 alloc_EXPR_LIST (0, cond_under, desc->infinite);
1855 if (cond_over != const0_rtx)
1856 desc->noloop_assumptions =
1857 alloc_EXPR_LIST (0, cond_over, desc->noloop_assumptions);
1858 break;
1860 case GE:
1861 case GT:
1862 case GEU:
1863 case GTU:
1864 if (cond_over != const0_rtx)
1865 desc->infinite =
1866 alloc_EXPR_LIST (0, cond_over, desc->infinite);
1867 if (cond_under != const0_rtx)
1868 desc->noloop_assumptions =
1869 alloc_EXPR_LIST (0, cond_under, desc->noloop_assumptions);
1870 break;
1872 case NE:
1873 if (cond_over != const0_rtx)
1874 desc->infinite =
1875 alloc_EXPR_LIST (0, cond_over, desc->infinite);
1876 if (cond_under != const0_rtx)
1877 desc->infinite =
1878 alloc_EXPR_LIST (0, cond_under, desc->infinite);
1879 break;
1881 default:
1882 gcc_unreachable ();
1885 iv->mode = mode;
1886 iv->extend = signed_p ? SIGN_EXTEND : ZERO_EXTEND;
1889 /* Transforms IV0 and IV1 compared by COND so that they are both compared as
1890 subregs of the same mode if possible (sometimes it is necessary to add
1891 some assumptions to DESC). */
1893 static bool
1894 canonicalize_iv_subregs (struct rtx_iv *iv0, struct rtx_iv *iv1,
1895 enum rtx_code cond, struct niter_desc *desc)
1897 enum machine_mode comp_mode;
1898 bool signed_p;
1900 /* If the ivs behave specially in the first iteration, or are
1901 added/multiplied after extending, we ignore them. */
1902 if (iv0->first_special || iv0->mult != const1_rtx || iv0->delta != const0_rtx)
1903 return false;
1904 if (iv1->first_special || iv1->mult != const1_rtx || iv1->delta != const0_rtx)
1905 return false;
1907 /* If there is some extend, it must match signedness of the comparison. */
1908 switch (cond)
1910 case LE:
1911 case LT:
1912 if (iv0->extend == ZERO_EXTEND
1913 || iv1->extend == ZERO_EXTEND)
1914 return false;
1915 signed_p = true;
1916 break;
1918 case LEU:
1919 case LTU:
1920 if (iv0->extend == SIGN_EXTEND
1921 || iv1->extend == SIGN_EXTEND)
1922 return false;
1923 signed_p = false;
1924 break;
1926 case NE:
1927 if (iv0->extend != UNKNOWN
1928 && iv1->extend != UNKNOWN
1929 && iv0->extend != iv1->extend)
1930 return false;
1932 signed_p = false;
1933 if (iv0->extend != UNKNOWN)
1934 signed_p = iv0->extend == SIGN_EXTEND;
1935 if (iv1->extend != UNKNOWN)
1936 signed_p = iv1->extend == SIGN_EXTEND;
1937 break;
1939 default:
1940 gcc_unreachable ();
1943 /* Values of both variables should be computed in the same mode. These
1944 might indeed be different, if we have comparison like
1946 (compare (subreg:SI (iv0)) (subreg:SI (iv1)))
1948 and iv0 and iv1 are both ivs iterating in SI mode, but calculated
1949 in different modes. This does not seem impossible to handle, but
1950 it hardly ever occurs in practice.
1952 The only exception is the case when one of the operands is invariant.
1953 For example pentium 3 generates comparisons like
1954 (lt (subreg:HI (reg:SI)) 100). Here we assign HImode to 100, but we
1955 definitely do not want this to prevent the optimization. */
1956 comp_mode = iv0->extend_mode;
1957 if (GET_MODE_BITSIZE (comp_mode) < GET_MODE_BITSIZE (iv1->extend_mode))
1958 comp_mode = iv1->extend_mode;
1960 if (iv0->extend_mode != comp_mode)
1962 if (iv0->mode != iv0->extend_mode
1963 || iv0->step != const0_rtx)
1964 return false;
1966 iv0->base = simplify_gen_unary (signed_p ? SIGN_EXTEND : ZERO_EXTEND,
1967 comp_mode, iv0->base, iv0->mode);
1968 iv0->extend_mode = comp_mode;
1971 if (iv1->extend_mode != comp_mode)
1973 if (iv1->mode != iv1->extend_mode
1974 || iv1->step != const0_rtx)
1975 return false;
1977 iv1->base = simplify_gen_unary (signed_p ? SIGN_EXTEND : ZERO_EXTEND,
1978 comp_mode, iv1->base, iv1->mode);
1979 iv1->extend_mode = comp_mode;
1982 /* Check that both ivs belong to a range of a single mode. If one of the
1983 operands is an invariant, we may need to shorten it into the common
1984 mode. */
1985 if (iv0->mode == iv0->extend_mode
1986 && iv0->step == const0_rtx
1987 && iv0->mode != iv1->mode)
1988 shorten_into_mode (iv0, iv1->mode, cond, signed_p, desc);
1990 if (iv1->mode == iv1->extend_mode
1991 && iv1->step == const0_rtx
1992 && iv0->mode != iv1->mode)
1993 shorten_into_mode (iv1, iv0->mode, swap_condition (cond), signed_p, desc);
1995 if (iv0->mode != iv1->mode)
1996 return false;
1998 desc->mode = iv0->mode;
1999 desc->signed_p = signed_p;
2001 return true;
2004 /* Tries to estimate the maximum number of iterations. */
2006 static unsigned HOST_WIDEST_INT
2007 determine_max_iter (struct loop *loop, struct niter_desc *desc)
2009 rtx niter = desc->niter_expr;
2010 rtx mmin, mmax, cmp;
2011 unsigned HOST_WIDEST_INT nmax, inc;
2013 if (GET_CODE (niter) == AND
2014 && GET_CODE (XEXP (niter, 0)) == CONST_INT)
2016 nmax = INTVAL (XEXP (niter, 0));
2017 if (!(nmax & (nmax + 1)))
2019 desc->niter_max = nmax;
2020 return nmax;
2024 get_mode_bounds (desc->mode, desc->signed_p, desc->mode, &mmin, &mmax);
2025 nmax = INTVAL (mmax) - INTVAL (mmin);
2027 if (GET_CODE (niter) == UDIV)
2029 if (GET_CODE (XEXP (niter, 1)) != CONST_INT)
2031 desc->niter_max = nmax;
2032 return nmax;
2034 inc = INTVAL (XEXP (niter, 1));
2035 niter = XEXP (niter, 0);
2037 else
2038 inc = 1;
2040 /* We could use a binary search here, but for now improving the upper
2041 bound by just one eliminates one important corner case. */
2042 cmp = gen_rtx_fmt_ee (desc->signed_p ? LT : LTU, VOIDmode, niter, mmax);
2043 simplify_using_initial_values (loop, UNKNOWN, &cmp);
2044 if (cmp == const_true_rtx)
2046 nmax--;
2048 if (dump_file)
2049 fprintf (dump_file, ";; improved upper bound by one.\n");
2051 desc->niter_max = nmax / inc;
2052 return nmax / inc;
2055 /* Computes number of iterations of the CONDITION in INSN in LOOP and stores
2056 the result into DESC. Very similar to determine_number_of_iterations
2057 (basically its rtl version), complicated by things like subregs. */
2059 static void
2060 iv_number_of_iterations (struct loop *loop, rtx insn, rtx condition,
2061 struct niter_desc *desc)
2063 rtx op0, op1, delta, step, bound, may_xform, tmp, tmp0, tmp1;
2064 struct rtx_iv iv0, iv1, tmp_iv;
2065 rtx assumption, may_not_xform;
2066 enum rtx_code cond;
2067 enum machine_mode mode, comp_mode;
2068 rtx mmin, mmax, mode_mmin, mode_mmax;
2069 unsigned HOST_WIDEST_INT s, size, d, inv;
2070 HOST_WIDEST_INT up, down, inc, step_val;
2071 int was_sharp = false;
2072 rtx old_niter;
2073 bool step_is_pow2;
2075 /* The meaning of these assumptions is this:
2076 if !assumptions
2077 then the rest of information does not have to be valid
2078 if noloop_assumptions then the loop does not roll
2079 if infinite then this exit is never used */
2081 desc->assumptions = NULL_RTX;
2082 desc->noloop_assumptions = NULL_RTX;
2083 desc->infinite = NULL_RTX;
2084 desc->simple_p = true;
2086 desc->const_iter = false;
2087 desc->niter_expr = NULL_RTX;
2088 desc->niter_max = 0;
2090 cond = GET_CODE (condition);
2091 gcc_assert (COMPARISON_P (condition));
2093 mode = GET_MODE (XEXP (condition, 0));
2094 if (mode == VOIDmode)
2095 mode = GET_MODE (XEXP (condition, 1));
2096 /* The constant comparisons should be folded. */
2097 gcc_assert (mode != VOIDmode);
2099 /* We only handle integers or pointers. */
2100 if (GET_MODE_CLASS (mode) != MODE_INT
2101 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
2102 goto fail;
2104 op0 = XEXP (condition, 0);
2105 if (!iv_analyze (insn, op0, &iv0))
2106 goto fail;
2107 if (iv0.extend_mode == VOIDmode)
2108 iv0.mode = iv0.extend_mode = mode;
2110 op1 = XEXP (condition, 1);
2111 if (!iv_analyze (insn, op1, &iv1))
2112 goto fail;
2113 if (iv1.extend_mode == VOIDmode)
2114 iv1.mode = iv1.extend_mode = mode;
2116 if (GET_MODE_BITSIZE (iv0.extend_mode) > HOST_BITS_PER_WIDE_INT
2117 || GET_MODE_BITSIZE (iv1.extend_mode) > HOST_BITS_PER_WIDE_INT)
2118 goto fail;
2120 /* Check condition and normalize it. */
2122 switch (cond)
2124 case GE:
2125 case GT:
2126 case GEU:
2127 case GTU:
2128 tmp_iv = iv0; iv0 = iv1; iv1 = tmp_iv;
2129 cond = swap_condition (cond);
2130 break;
2131 case NE:
2132 case LE:
2133 case LEU:
2134 case LT:
2135 case LTU:
2136 break;
2137 default:
2138 goto fail;
2141 /* Handle extends. This is relatively nontrivial, so we only try in some
2142 easy cases, when we can canonicalize the ivs (possibly by adding some
2143 assumptions) to shape subreg (base + i * step). This function also fills
2144 in desc->mode and desc->signed_p. */
2146 if (!canonicalize_iv_subregs (&iv0, &iv1, cond, desc))
2147 goto fail;
2149 comp_mode = iv0.extend_mode;
2150 mode = iv0.mode;
2151 size = GET_MODE_BITSIZE (mode);
2152 get_mode_bounds (mode, (cond == LE || cond == LT), comp_mode, &mmin, &mmax);
2153 mode_mmin = lowpart_subreg (mode, mmin, comp_mode);
2154 mode_mmax = lowpart_subreg (mode, mmax, comp_mode);
2156 if (GET_CODE (iv0.step) != CONST_INT || GET_CODE (iv1.step) != CONST_INT)
2157 goto fail;
2159 /* We can take care of the case of two induction variables chasing each other
2160 if the test is NE. I have never seen a loop using it, but still it is
2161 cool. */
2162 if (iv0.step != const0_rtx && iv1.step != const0_rtx)
2164 if (cond != NE)
2165 goto fail;
2167 iv0.step = simplify_gen_binary (MINUS, comp_mode, iv0.step, iv1.step);
2168 iv1.step = const0_rtx;
2171 /* This is either an infinite loop or one that ends immediately, depending
2172 on the initial values. Unswitching should remove this kind of condition. */
2173 if (iv0.step == const0_rtx && iv1.step == const0_rtx)
2174 goto fail;
2176 if (cond != NE)
2178 if (iv0.step == const0_rtx)
2179 step_val = -INTVAL (iv1.step);
2180 else
2181 step_val = INTVAL (iv0.step);
2183 /* Ignore loops of while (i-- < 10) type. */
2184 if (step_val < 0)
2185 goto fail;
2187 step_is_pow2 = !(step_val & (step_val - 1));
2189 else
2191 /* We do not care about whether the step is power of two in this
2192 case. */
2193 step_is_pow2 = false;
2194 step_val = 0;
2197 /* Some more condition normalization. We must record some assumptions
2198 due to overflows. */
2199 switch (cond)
2201 case LT:
2202 case LTU:
2203 /* We want to take care only of non-sharp relationals; this is easy,
2204 as in the cases where the overflow would make the transformation unsafe,
2205 the loop does not roll. Seemingly it would make more sense to want
2206 to take care of sharp relationals instead, as NE is more similar to
2207 them, but the problem is that here the transformation would be more
2208 difficult due to possibly infinite loops. */
2209 if (iv0.step == const0_rtx)
2211 tmp = lowpart_subreg (mode, iv0.base, comp_mode);
2212 assumption = simplify_gen_relational (EQ, SImode, mode, tmp,
2213 mode_mmax);
2214 if (assumption == const_true_rtx)
2215 goto zero_iter_simplify;
2216 iv0.base = simplify_gen_binary (PLUS, comp_mode,
2217 iv0.base, const1_rtx);
2219 else
2221 tmp = lowpart_subreg (mode, iv1.base, comp_mode);
2222 assumption = simplify_gen_relational (EQ, SImode, mode, tmp,
2223 mode_mmin);
2224 if (assumption == const_true_rtx)
2225 goto zero_iter_simplify;
2226 iv1.base = simplify_gen_binary (PLUS, comp_mode,
2227 iv1.base, constm1_rtx);
2230 if (assumption != const0_rtx)
2231 desc->noloop_assumptions =
2232 alloc_EXPR_LIST (0, assumption, desc->noloop_assumptions);
2233 cond = (cond == LT) ? LE : LEU;
2235 /* It will be useful to be able to tell the difference once more in
2236 LE -> NE reduction. */
2237 was_sharp = true;
2238 break;
2239 default: ;
2242 /* Take care of trivially infinite loops. */
2243 if (cond != NE)
2245 if (iv0.step == const0_rtx)
2247 tmp = lowpart_subreg (mode, iv0.base, comp_mode);
2248 if (rtx_equal_p (tmp, mode_mmin))
2250 desc->infinite =
2251 alloc_EXPR_LIST (0, const_true_rtx, NULL_RTX);
2252 /* Fill in the remaining fields somehow. */
2253 goto zero_iter_simplify;
2256 else
2258 tmp = lowpart_subreg (mode, iv1.base, comp_mode);
2259 if (rtx_equal_p (tmp, mode_mmax))
2261 desc->infinite =
2262 alloc_EXPR_LIST (0, const_true_rtx, NULL_RTX);
2263 /* Fill in the remaining fields somehow. */
2264 goto zero_iter_simplify;
2269 /* If we can, we want to take care of NE conditions instead of size
2270 comparisons, as they are much more friendly (most importantly
2271 this takes care of the special handling of loops with step 1). We can
2272 do it if we first check that the upper bound is greater than or equal to
2273 the lower bound, that their difference is a constant c modulo the step,
2274 and that there is no overflow. */
2275 if (cond != NE)
2277 if (iv0.step == const0_rtx)
2278 step = simplify_gen_unary (NEG, comp_mode, iv1.step, comp_mode);
2279 else
2280 step = iv0.step;
2281 delta = simplify_gen_binary (MINUS, comp_mode, iv1.base, iv0.base);
2282 delta = lowpart_subreg (mode, delta, comp_mode);
2283 delta = simplify_gen_binary (UMOD, mode, delta, step);
2284 may_xform = const0_rtx;
2285 may_not_xform = const_true_rtx;
2287 if (GET_CODE (delta) == CONST_INT)
2289 if (was_sharp && INTVAL (delta) == INTVAL (step) - 1)
2291 /* A special case. We have transformed condition of type
2292 for (i = 0; i < 4; i += 4)
2293 into
2294 for (i = 0; i <= 3; i += 4)
2295 obviously if the test for overflow during that transformation
2296 passed, we cannot overflow here. Most importantly, any
2297 loop with a sharp end condition and step 1 falls into this
2298 category, so handling this case specially is definitely
2299 worth the trouble. */
2300 may_xform = const_true_rtx;
2302 else if (iv0.step == const0_rtx)
2304 bound = simplify_gen_binary (PLUS, comp_mode, mmin, step);
2305 bound = simplify_gen_binary (MINUS, comp_mode, bound, delta);
2306 bound = lowpart_subreg (mode, bound, comp_mode);
2307 tmp = lowpart_subreg (mode, iv0.base, comp_mode);
2308 may_xform = simplify_gen_relational (cond, SImode, mode,
2309 bound, tmp);
2310 may_not_xform = simplify_gen_relational (reverse_condition (cond),
2311 SImode, mode,
2312 bound, tmp);
2314 else
2316 bound = simplify_gen_binary (MINUS, comp_mode, mmax, step);
2317 bound = simplify_gen_binary (PLUS, comp_mode, bound, delta);
2318 bound = lowpart_subreg (mode, bound, comp_mode);
2319 tmp = lowpart_subreg (mode, iv1.base, comp_mode);
2320 may_xform = simplify_gen_relational (cond, SImode, mode,
2321 tmp, bound);
2322 may_not_xform = simplify_gen_relational (reverse_condition (cond),
2323 SImode, mode,
2324 tmp, bound);
2328 if (may_xform != const0_rtx)
2330 /* We always perform the transformation, provided that it is not
2331 completely senseless. This is OK, as we would need this assumption
2332 to determine the number of iterations anyway. */
2333 if (may_xform != const_true_rtx)
2335 /* If the step is a power of two and the final value we have
2336 computed overflows, the loop is infinite. Otherwise it is
2337 nontrivial to compute the number of iterations, so we record the condition as an assumption. */
2338 if (step_is_pow2)
2339 desc->infinite = alloc_EXPR_LIST (0, may_not_xform,
2340 desc->infinite);
2341 else
2342 desc->assumptions = alloc_EXPR_LIST (0, may_xform,
2343 desc->assumptions);
2346 /* We are going to lose some information about the upper bound on the
2347 number of iterations in this step, so record the information
2348 here. */
2349 inc = INTVAL (iv0.step) - INTVAL (iv1.step);
2350 if (GET_CODE (iv1.base) == CONST_INT)
2351 up = INTVAL (iv1.base);
2352 else
2353 up = INTVAL (mode_mmax) - inc;
2354 down = INTVAL (GET_CODE (iv0.base) == CONST_INT
2355 ? iv0.base
2356 : mode_mmin);
2357 desc->niter_max = (up - down) / inc + 1;
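/* For example, for
     for (i = 0; i <= 9; i += 3)
   we get inc = 3, down = 0 and up = 9, hence a recorded upper bound of
   (9 - 0) / 3 + 1 = 4 iterations, matching i = 0, 3, 6, 9.  */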
2359 if (iv0.step == const0_rtx)
2361 iv0.base = simplify_gen_binary (PLUS, comp_mode, iv0.base, delta);
2362 iv0.base = simplify_gen_binary (MINUS, comp_mode, iv0.base, step);
2364 else
2366 iv1.base = simplify_gen_binary (MINUS, comp_mode, iv1.base, delta);
2367 iv1.base = simplify_gen_binary (PLUS, comp_mode, iv1.base, step);
2370 tmp0 = lowpart_subreg (mode, iv0.base, comp_mode);
2371 tmp1 = lowpart_subreg (mode, iv1.base, comp_mode);
2372 assumption = simplify_gen_relational (reverse_condition (cond),
2373 SImode, mode, tmp0, tmp1);
2374 if (assumption == const_true_rtx)
2375 goto zero_iter_simplify;
2376 else if (assumption != const0_rtx)
2377 desc->noloop_assumptions =
2378 alloc_EXPR_LIST (0, assumption, desc->noloop_assumptions);
2379 cond = NE;
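/* Continuing the example above: for
     for (i = 0; i <= 9; i += 3)
   delta = (9 - 0) % 3 = 0, the bound becomes 9 - 0 + 3 = 12, and the exit
   test is rewritten as iv != 12; the induction variable runs through
   0, 3, 6, 9 and the loop exits when it reaches 12, i.e. after exactly
   4 iterations.  */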
2383 /* Count the number of iterations. */
2384 if (cond == NE)
2386 /* Everything we do here is arithmetic modulo the size of the mode. This
2387 enables us to do more involved computations of the number of iterations
2388 than in other cases. First transform the condition into the form
2389 s * i <> c, with s positive. */
2390 iv1.base = simplify_gen_binary (MINUS, comp_mode, iv1.base, iv0.base);
2391 iv0.base = const0_rtx;
2392 iv0.step = simplify_gen_binary (MINUS, comp_mode, iv0.step, iv1.step);
2393 iv1.step = const0_rtx;
2394 if (INTVAL (iv0.step) < 0)
2396 iv0.step = simplify_gen_unary (NEG, comp_mode, iv0.step, mode);
2397 iv1.base = simplify_gen_unary (NEG, comp_mode, iv1.base, mode);
2399 iv0.step = lowpart_subreg (mode, iv0.step, comp_mode);
2401 /* Let gcd (s, size of mode) = d. If d does not divide c, the loop
2402 is infinite. Otherwise, the number of iterations is
2403 (inverse(s/d) * (c/d)) mod (size of mode/d). */
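/* A worked example, assuming a 32-bit mode: with s = 12 and c = 36 we get
   d = 4, s/d = 3 and c/d = 9; the inverse of 3 modulo 2^30 is 715827883,
   and 715827883 * 9 mod 2^30 = 3, so the loop runs 3 times (indeed
   12 * 3 == 36).  If c were not divisible by 4, no iteration count would
   satisfy the congruence and the loop would be infinite.  */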
2404 s = INTVAL (iv0.step); d = 1;
2405 while (s % 2 != 1)
2407 s /= 2;
2408 d *= 2;
2409 size--;
2411 bound = GEN_INT (((unsigned HOST_WIDEST_INT) 1 << (size - 1 ) << 1) - 1);
2413 tmp1 = lowpart_subreg (mode, iv1.base, comp_mode);
2414 tmp = simplify_gen_binary (UMOD, mode, tmp1, GEN_INT (d));
2415 assumption = simplify_gen_relational (NE, SImode, mode, tmp, const0_rtx);
2416 desc->infinite = alloc_EXPR_LIST (0, assumption, desc->infinite);
2418 tmp = simplify_gen_binary (UDIV, mode, tmp1, GEN_INT (d));
2419 inv = inverse (s, size);
2420 tmp = simplify_gen_binary (MULT, mode, tmp, gen_int_mode (inv, mode));
2421 desc->niter_expr = simplify_gen_binary (AND, mode, tmp, bound);
2423 else
2425 if (iv1.step == const0_rtx)
2426 /* Condition in the shape a + s * i <= b.
2427 We must know that b + s does not overflow and that a <= b + s; then we
2428 can compute the number of iterations as (b + s - a) / s. (It might
2429 seem that we could in fact be more clever about testing the b + s
2430 overflow condition using some information about b - a mod s,
2431 but that was already taken into account during the LE -> NE transform.) */
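/* For instance, for
     for (i = 2; i <= 11; i += 3)
   we require that 11 + 3 does not overflow and that 2 <= 11 + 3; the
   count is then (11 + 3 - 2) / 3 = 4, matching i = 2, 5, 8, 11.  */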
2433 step = iv0.step;
2434 tmp0 = lowpart_subreg (mode, iv0.base, comp_mode);
2435 tmp1 = lowpart_subreg (mode, iv1.base, comp_mode);
2437 bound = simplify_gen_binary (MINUS, mode, mode_mmax,
2438 lowpart_subreg (mode, step,
2439 comp_mode));
2440 if (step_is_pow2)
2442 rtx t0, t1;
2444 /* If s is a power of 2, we know that the loop is infinite if
2445 a % s <= b % s and b + s overflows. */
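/* For example, in an 8-bit unsigned mode with a = 1, b = 254 and s = 4,
   b + s overflows and a % 4 == 1 <= b % 4 == 2, so the induction variable
   cycles through 1, 5, ..., 253, 1, ... without ever exceeding b and the
   loop never terminates.  */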
2446 assumption = simplify_gen_relational (reverse_condition (cond),
2447 SImode, mode,
2448 tmp1, bound);
2450 t0 = simplify_gen_binary (UMOD, mode, copy_rtx (tmp0), step);
2451 t1 = simplify_gen_binary (UMOD, mode, copy_rtx (tmp1), step);
2452 tmp = simplify_gen_relational (cond, SImode, mode, t0, t1);
2453 assumption = simplify_gen_binary (AND, SImode, assumption, tmp);
2454 desc->infinite =
2455 alloc_EXPR_LIST (0, assumption, desc->infinite);
2457 else
2459 assumption = simplify_gen_relational (cond, SImode, mode,
2460 tmp1, bound);
2461 desc->assumptions =
2462 alloc_EXPR_LIST (0, assumption, desc->assumptions);
2465 tmp = simplify_gen_binary (PLUS, comp_mode, iv1.base, iv0.step);
2466 tmp = lowpart_subreg (mode, tmp, comp_mode);
2467 assumption = simplify_gen_relational (reverse_condition (cond),
2468 SImode, mode, tmp0, tmp);
2470 delta = simplify_gen_binary (PLUS, mode, tmp1, step);
2471 delta = simplify_gen_binary (MINUS, mode, delta, tmp0);
2473 else
2475 /* Condition in the shape a <= b - s * i.
2476 We must know that a - s does not overflow and that a - s <= b; then
2477 we can again compute the number of iterations as (b - (a - s)) / s. */
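/* For instance, with a = 2, b = 11 and s = 3 (the bound decreasing by 3
   each iteration), a - s = -1 does not wrap, -1 <= 11 holds, and the
   count is (11 - (2 - 3)) / 3 = 4.  */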
2478 step = simplify_gen_unary (NEG, mode, iv1.step, mode);
2479 tmp0 = lowpart_subreg (mode, iv0.base, comp_mode);
2480 tmp1 = lowpart_subreg (mode, iv1.base, comp_mode);
2482 bound = simplify_gen_binary (PLUS, mode, mode_mmin,
2483 lowpart_subreg (mode, step, comp_mode));
2484 if (step_is_pow2)
2486 rtx t0, t1;
2488 /* If s is a power of 2, we know that the loop is infinite if
2489 a % s <= b % s and a - s overflows. */
2490 assumption = simplify_gen_relational (reverse_condition (cond),
2491 SImode, mode,
2492 bound, tmp0);
2494 t0 = simplify_gen_binary (UMOD, mode, copy_rtx (tmp0), step);
2495 t1 = simplify_gen_binary (UMOD, mode, copy_rtx (tmp1), step);
2496 tmp = simplify_gen_relational (cond, SImode, mode, t0, t1);
2497 assumption = simplify_gen_binary (AND, SImode, assumption, tmp);
2498 desc->infinite =
2499 alloc_EXPR_LIST (0, assumption, desc->infinite);
2501 else
2503 assumption = simplify_gen_relational (cond, SImode, mode,
2504 bound, tmp0);
2505 desc->assumptions =
2506 alloc_EXPR_LIST (0, assumption, desc->assumptions);
2509 tmp = simplify_gen_binary (PLUS, comp_mode, iv0.base, iv1.step);
2510 tmp = lowpart_subreg (mode, tmp, comp_mode);
2511 assumption = simplify_gen_relational (reverse_condition (cond),
2512 SImode, mode,
2513 tmp, tmp1);
2514 delta = simplify_gen_binary (MINUS, mode, tmp0, step);
2515 delta = simplify_gen_binary (MINUS, mode, tmp1, delta);
2517 if (assumption == const_true_rtx)
2518 goto zero_iter_simplify;
2519 else if (assumption != const0_rtx)
2520 desc->noloop_assumptions =
2521 alloc_EXPR_LIST (0, assumption, desc->noloop_assumptions);
2522 delta = simplify_gen_binary (UDIV, mode, delta, step);
2523 desc->niter_expr = delta;
2526 old_niter = desc->niter_expr;
2528 simplify_using_initial_values (loop, AND, &desc->assumptions);
2529 if (desc->assumptions
2530 && XEXP (desc->assumptions, 0) == const0_rtx)
2531 goto fail;
2532 simplify_using_initial_values (loop, IOR, &desc->noloop_assumptions);
2533 simplify_using_initial_values (loop, IOR, &desc->infinite);
2534 simplify_using_initial_values (loop, UNKNOWN, &desc->niter_expr);
2536 /* Rerun the simplification. Consider code (created by copying loop headers)
2537
2538    i = 0;
2539
2540    if (0 < n)
2541      {
2542        do
2543          {
2544            i++;
2545          } while (i < n);
2546      }
2547
2548    The first pass determines that i = 0, the second pass uses it to eliminate
2549    the noloop assumption. */
2551 simplify_using_initial_values (loop, AND, &desc->assumptions);
2552 if (desc->assumptions
2553 && XEXP (desc->assumptions, 0) == const0_rtx)
2554 goto fail;
2555 simplify_using_initial_values (loop, IOR, &desc->noloop_assumptions);
2556 simplify_using_initial_values (loop, IOR, &desc->infinite);
2557 simplify_using_initial_values (loop, UNKNOWN, &desc->niter_expr);
2559 if (desc->noloop_assumptions
2560 && XEXP (desc->noloop_assumptions, 0) == const_true_rtx)
2561 goto zero_iter;
2563 if (GET_CODE (desc->niter_expr) == CONST_INT)
2565 unsigned HOST_WIDEST_INT val = INTVAL (desc->niter_expr);
2567 desc->const_iter = true;
2568 desc->niter_max = desc->niter = val & GET_MODE_MASK (desc->mode);
2570 else
2572 if (!desc->niter_max)
2573 desc->niter_max = determine_max_iter (loop, desc);
2575 /* simplify_using_initial_values does a copy propagation on the registers
2576 in the expression for the number of iterations. This prolongs the life
2577 ranges of registers and increases register pressure, and usually
2578 brings no gain (and if it happens to, the cse pass will take care
2579 of it anyway). So prevent this behavior, unless it enabled us to
2580 derive that the number of iterations is a constant. */
2581 desc->niter_expr = old_niter;
2584 return;
2586 zero_iter_simplify:
2587 /* Simplify the assumptions. */
2588 simplify_using_initial_values (loop, AND, &desc->assumptions);
2589 if (desc->assumptions
2590 && XEXP (desc->assumptions, 0) == const0_rtx)
2591 goto fail;
2592 simplify_using_initial_values (loop, IOR, &desc->infinite);
2594 /* Fallthru. */
2595 zero_iter:
2596 desc->const_iter = true;
2597 desc->niter = 0;
2598 desc->niter_max = 0;
2599 desc->noloop_assumptions = NULL_RTX;
2600 desc->niter_expr = const0_rtx;
2601 return;
2603 fail:
2604 desc->simple_p = false;
2605 return;
2608 /* Checks whether E is a simple exit from LOOP and stores its description
2609 into DESC. */
2611 static void
2612 check_simple_exit (struct loop *loop, edge e, struct niter_desc *desc)
2614 basic_block exit_bb;
2615 rtx condition, at;
2616 edge ein;
2618 exit_bb = e->src;
2619 desc->simple_p = false;
2621 /* It must belong directly to the loop. */
2622 if (exit_bb->loop_father != loop)
2623 return;
2625 /* It must be tested (at least) once during any iteration. */
2626 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit_bb))
2627 return;
2629 /* It must end in a simple conditional jump. */
2630 if (!any_condjump_p (BB_END (exit_bb)))
2631 return;
2633 ein = EDGE_SUCC (exit_bb, 0);
2634 if (ein == e)
2635 ein = EDGE_SUCC (exit_bb, 1);
2637 desc->out_edge = e;
2638 desc->in_edge = ein;
2640 /* Test whether the condition is suitable. */
2641 if (!(condition = get_condition (BB_END (ein->src), &at, false, false)))
2642 return;
2644 if (ein->flags & EDGE_FALLTHRU)
2646 condition = reversed_condition (condition);
2647 if (!condition)
2648 return;
2651 /* Check that we are able to determine the number of iterations and fill
2652 in the information about it. */
2653 iv_number_of_iterations (loop, at, condition, desc);
2656 /* Finds a simple exit of LOOP and stores its description into DESC. */
2658 void
2659 find_simple_exit (struct loop *loop, struct niter_desc *desc)
2661 unsigned i;
2662 basic_block *body;
2663 edge e;
2664 struct niter_desc act;
2665 bool any = false;
2666 edge_iterator ei;
2668 desc->simple_p = false;
2669 body = get_loop_body (loop);
2671 for (i = 0; i < loop->num_nodes; i++)
2673 FOR_EACH_EDGE (e, ei, body[i]->succs)
2675 if (flow_bb_inside_loop_p (loop, e->dest))
2676 continue;
2678 check_simple_exit (loop, e, &act);
2679 if (!act.simple_p)
2680 continue;
2682 if (!any)
2683 any = true;
2684 else
2686 /* Prefer constant iterations; the fewer the better. */
2687 if (!act.const_iter
2688 || (desc->const_iter && act.niter >= desc->niter))
2689 continue;
2691 /* Also, if the current exit may be infinite while the old one
2692 may not, prefer the old one. */
2693 if (act.infinite && !desc->infinite)
2694 continue;
2697 *desc = act;
2701 if (dump_file)
2703 if (desc->simple_p)
2705 fprintf (dump_file, "Loop %d is simple:\n", loop->num);
2706 fprintf (dump_file, " simple exit %d -> %d\n",
2707 desc->out_edge->src->index,
2708 desc->out_edge->dest->index);
2709 if (desc->assumptions)
2711 fprintf (dump_file, " assumptions: ");
2712 print_rtl (dump_file, desc->assumptions);
2713 fprintf (dump_file, "\n");
2715 if (desc->noloop_assumptions)
2717 fprintf (dump_file, " does not roll if: ");
2718 print_rtl (dump_file, desc->noloop_assumptions);
2719 fprintf (dump_file, "\n");
2721 if (desc->infinite)
2723 fprintf (dump_file, " infinite if: ");
2724 print_rtl (dump_file, desc->infinite);
2725 fprintf (dump_file, "\n");
2728 fprintf (dump_file, " number of iterations: ");
2729 print_rtl (dump_file, desc->niter_expr);
2730 fprintf (dump_file, "\n");
2732 fprintf (dump_file, " upper bound: ");
2733 fprintf (dump_file, HOST_WIDEST_INT_PRINT_DEC, desc->niter_max);
2734 fprintf (dump_file, "\n");
2736 else
2737 fprintf (dump_file, "Loop %d is not simple.\n", loop->num);
2740 free (body);
2743 /* Creates a simple loop description of LOOP if it was not computed
2744 already. */
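/* A minimal usage sketch (unroll_by is a hypothetical client helper):

     struct niter_desc *desc = get_simple_loop_desc (loop);
     if (desc->simple_p && desc->const_iter)
       unroll_by (loop, desc->niter);

   desc->niter holds the exact iteration count when const_iter is set, and
   desc->niter_max is an upper bound even when it is not; the description is
   released by free_simple_loop_desc once the client is done with it.  */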
2746 struct niter_desc *
2747 get_simple_loop_desc (struct loop *loop)
2749 struct niter_desc *desc = simple_loop_desc (loop);
2751 if (desc)
2752 return desc;
2754 desc = XNEW (struct niter_desc);
2755 iv_analysis_loop_init (loop);
2756 find_simple_exit (loop, desc);
2757 loop->aux = desc;
2759 if (desc->simple_p && (desc->assumptions || desc->infinite))
2761 const char *wording;
2763 /* Assume that no overflow happens and that the loop is finite.
2764 We already warned at the tree level if we ran optimizations there. */
2765 if (!flag_tree_loop_optimize && warn_unsafe_loop_optimizations)
2767 if (desc->infinite)
2769 wording =
2770 flag_unsafe_loop_optimizations
2771 ? N_("assuming that the loop is not infinite")
2772 : N_("cannot optimize possibly infinite loops");
2773 warning (OPT_Wunsafe_loop_optimizations, "%s",
2774 gettext (wording));
2776 if (desc->assumptions)
2778 wording =
2779 flag_unsafe_loop_optimizations
2780 ? N_("assuming that the loop counter does not overflow")
2781 : N_("cannot optimize loop, the loop counter may overflow");
2782 warning (OPT_Wunsafe_loop_optimizations, "%s",
2783 gettext (wording));
2787 if (flag_unsafe_loop_optimizations)
2789 desc->assumptions = NULL_RTX;
2790 desc->infinite = NULL_RTX;
2794 return desc;
2797 /* Releases simple loop description for LOOP. */
2799 void
2800 free_simple_loop_desc (struct loop *loop)
2802 struct niter_desc *desc = simple_loop_desc (loop);
2804 if (!desc)
2805 return;
2807 free (desc);
2808 loop->aux = NULL;