re PR middle-end/39867 (Wrong result of conditional operator exp < 2 ? 2U : (unsigned...
[official-gcc.git] / gcc / loop-iv.c
blob940373609755d2a7bb5a9e5803209240a15b8670
1 /* Rtl-level induction variable analysis.
2 Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009
3 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 /* This is a simple analysis of induction variables of the loop. The major use
22 is for determining the number of iterations of a loop for loop unrolling,
23 doloop optimization and branch prediction. The iv information is computed
24 on demand.
26 Induction variables are analyzed by walking the use-def chains. When
27 a basic induction variable (biv) is found, it is cached in the bivs
28 hash table. When register is proved to be a biv, its description
29 is stored to DF_REF_DATA of the def reference.
31 The analysis works always with one loop -- you must call
32 iv_analysis_loop_init (loop) for it. All the other functions then work with
33 this loop. When you need to work with another loop, just call
34 iv_analysis_loop_init for it. When you no longer need iv analysis, call
35 iv_analysis_done () to clean up the memory.
37 The available functions are:
39 iv_analyze (insn, reg, iv): Stores the description of the induction variable
40 corresponding to the use of register REG in INSN to IV. Returns true if
41 REG is an induction variable in INSN. false otherwise.
42 If use of REG is not found in INSN, following insns are scanned (so that
43 we may call this function on insn returned by get_condition).
44 iv_analyze_result (insn, def, iv): Stores to IV the description of the iv
45 corresponding to DEF, which is a register defined in INSN.
46 iv_analyze_expr (insn, rhs, mode, iv): Stores to IV the description of iv
47 corresponding to expression EXPR evaluated at INSN. All registers used bu
48 EXPR must also be used in INSN.
51 #include "config.h"
52 #include "system.h"
53 #include "coretypes.h"
54 #include "tm.h"
55 #include "rtl.h"
56 #include "hard-reg-set.h"
57 #include "obstack.h"
58 #include "basic-block.h"
59 #include "cfgloop.h"
60 #include "expr.h"
61 #include "intl.h"
62 #include "output.h"
63 #include "toplev.h"
64 #include "df.h"
65 #include "hashtab.h"
67 /* Possible return values of iv_get_reaching_def. */
69 enum iv_grd_result
71 /* More than one reaching def, or reaching def that does not
72 dominate the use. */
73 GRD_INVALID,
75 /* The use is trivial invariant of the loop, i.e. is not changed
76 inside the loop. */
77 GRD_INVARIANT,
79 /* The use is reached by initial value and a value from the
80 previous iteration. */
81 GRD_MAYBE_BIV,
83 /* The use has single dominating def. */
84 GRD_SINGLE_DOM
87 /* Information about a biv. */
89 struct biv_entry
91 unsigned regno; /* The register of the biv. */
92 struct rtx_iv iv; /* Value of the biv. */
95 static bool clean_slate = true;
97 static unsigned int iv_ref_table_size = 0;
99 /* Table of rtx_ivs indexed by the df_ref uid field. */
100 static struct rtx_iv ** iv_ref_table;
102 /* Induction variable stored at the reference. */
103 #define DF_REF_IV(REF) iv_ref_table[DF_REF_ID(REF)]
104 #define DF_REF_IV_SET(REF, IV) iv_ref_table[DF_REF_ID(REF)] = (IV)
106 /* The current loop. */
108 static struct loop *current_loop;
110 /* Bivs of the current loop. */
112 static htab_t bivs;
114 static bool iv_analyze_op (rtx, rtx, struct rtx_iv *);
116 /* Dumps information about IV to FILE. */
118 extern void dump_iv_info (FILE *, struct rtx_iv *);
119 void
120 dump_iv_info (FILE *file, struct rtx_iv *iv)
122 if (!iv->base)
124 fprintf (file, "not simple");
125 return;
128 if (iv->step == const0_rtx
129 && !iv->first_special)
130 fprintf (file, "invariant ");
132 print_rtl (file, iv->base);
133 if (iv->step != const0_rtx)
135 fprintf (file, " + ");
136 print_rtl (file, iv->step);
137 fprintf (file, " * iteration");
139 fprintf (file, " (in %s)", GET_MODE_NAME (iv->mode));
141 if (iv->mode != iv->extend_mode)
142 fprintf (file, " %s to %s",
143 rtx_name[iv->extend],
144 GET_MODE_NAME (iv->extend_mode));
146 if (iv->mult != const1_rtx)
148 fprintf (file, " * ");
149 print_rtl (file, iv->mult);
151 if (iv->delta != const0_rtx)
153 fprintf (file, " + ");
154 print_rtl (file, iv->delta);
156 if (iv->first_special)
157 fprintf (file, " (first special)");
160 /* Generates a subreg to get the least significant part of EXPR (in mode
161 INNER_MODE) to OUTER_MODE. */
164 lowpart_subreg (enum machine_mode outer_mode, rtx expr,
165 enum machine_mode inner_mode)
167 return simplify_gen_subreg (outer_mode, expr, inner_mode,
168 subreg_lowpart_offset (outer_mode, inner_mode));
171 static void
172 check_iv_ref_table_size (void)
174 if (iv_ref_table_size < DF_DEFS_TABLE_SIZE())
176 unsigned int new_size = DF_DEFS_TABLE_SIZE () + (DF_DEFS_TABLE_SIZE () / 4);
177 iv_ref_table = XRESIZEVEC (struct rtx_iv *, iv_ref_table, new_size);
178 memset (&iv_ref_table[iv_ref_table_size], 0,
179 (new_size - iv_ref_table_size) * sizeof (struct rtx_iv *));
180 iv_ref_table_size = new_size;
185 /* Checks whether REG is a well-behaved register. */
187 static bool
188 simple_reg_p (rtx reg)
190 unsigned r;
192 if (GET_CODE (reg) == SUBREG)
194 if (!subreg_lowpart_p (reg))
195 return false;
196 reg = SUBREG_REG (reg);
199 if (!REG_P (reg))
200 return false;
202 r = REGNO (reg);
203 if (HARD_REGISTER_NUM_P (r))
204 return false;
206 if (GET_MODE_CLASS (GET_MODE (reg)) != MODE_INT)
207 return false;
209 return true;
212 /* Clears the information about ivs stored in df. */
214 static void
215 clear_iv_info (void)
217 unsigned i, n_defs = DF_DEFS_TABLE_SIZE ();
218 struct rtx_iv *iv;
220 check_iv_ref_table_size ();
221 for (i = 0; i < n_defs; i++)
223 iv = iv_ref_table[i];
224 if (iv)
226 free (iv);
227 iv_ref_table[i] = NULL;
231 htab_empty (bivs);
234 /* Returns hash value for biv B. */
236 static hashval_t
237 biv_hash (const void *b)
239 return ((const struct biv_entry *) b)->regno;
242 /* Compares biv B and register R. */
244 static int
245 biv_eq (const void *b, const void *r)
247 return ((const struct biv_entry *) b)->regno == REGNO ((const_rtx) r);
250 /* Prepare the data for an induction variable analysis of a LOOP. */
252 void
253 iv_analysis_loop_init (struct loop *loop)
255 basic_block *body = get_loop_body_in_dom_order (loop), bb;
256 bitmap blocks = BITMAP_ALLOC (NULL);
257 unsigned i;
259 current_loop = loop;
261 /* Clear the information from the analysis of the previous loop. */
262 if (clean_slate)
264 df_set_flags (DF_EQ_NOTES + DF_DEFER_INSN_RESCAN);
265 bivs = htab_create (10, biv_hash, biv_eq, free);
266 clean_slate = false;
268 else
269 clear_iv_info ();
271 for (i = 0; i < loop->num_nodes; i++)
273 bb = body[i];
274 bitmap_set_bit (blocks, bb->index);
276 /* Get rid of the ud chains before processing the rescans. Then add
277 the problem back. */
278 df_remove_problem (df_chain);
279 df_process_deferred_rescans ();
280 df_chain_add_problem (DF_UD_CHAIN);
281 df_set_blocks (blocks);
282 df_analyze ();
283 if (dump_file)
284 df_dump_region (dump_file);
286 check_iv_ref_table_size ();
287 BITMAP_FREE (blocks);
288 free (body);
291 /* Finds the definition of REG that dominates loop latch and stores
292 it to DEF. Returns false if there is not a single definition
293 dominating the latch. If REG has no definition in loop, DEF
294 is set to NULL and true is returned. */
296 static bool
297 latch_dominating_def (rtx reg, df_ref *def)
299 df_ref single_rd = NULL, adef;
300 unsigned regno = REGNO (reg);
301 struct df_rd_bb_info *bb_info = DF_RD_BB_INFO (current_loop->latch);
303 for (adef = DF_REG_DEF_CHAIN (regno); adef; adef = DF_REF_NEXT_REG (adef))
305 if (!bitmap_bit_p (df->blocks_to_analyze, DF_REF_BBNO (adef))
306 || !bitmap_bit_p (bb_info->out, DF_REF_ID (adef)))
307 continue;
309 /* More than one reaching definition. */
310 if (single_rd)
311 return false;
313 if (!just_once_each_iteration_p (current_loop, DF_REF_BB (adef)))
314 return false;
316 single_rd = adef;
319 *def = single_rd;
320 return true;
323 /* Gets definition of REG reaching its use in INSN and stores it to DEF. */
325 static enum iv_grd_result
326 iv_get_reaching_def (rtx insn, rtx reg, df_ref *def)
328 df_ref use, adef;
329 basic_block def_bb, use_bb;
330 rtx def_insn;
331 bool dom_p;
333 *def = NULL;
334 if (!simple_reg_p (reg))
335 return GRD_INVALID;
336 if (GET_CODE (reg) == SUBREG)
337 reg = SUBREG_REG (reg);
338 gcc_assert (REG_P (reg));
340 use = df_find_use (insn, reg);
341 gcc_assert (use != NULL);
343 if (!DF_REF_CHAIN (use))
344 return GRD_INVARIANT;
346 /* More than one reaching def. */
347 if (DF_REF_CHAIN (use)->next)
348 return GRD_INVALID;
350 adef = DF_REF_CHAIN (use)->ref;
352 /* We do not handle setting only part of the register. */
353 if (DF_REF_FLAGS (adef) & DF_REF_READ_WRITE)
354 return GRD_INVALID;
356 def_insn = DF_REF_INSN (adef);
357 def_bb = DF_REF_BB (adef);
358 use_bb = BLOCK_FOR_INSN (insn);
360 if (use_bb == def_bb)
361 dom_p = (DF_INSN_LUID (def_insn) < DF_INSN_LUID (insn));
362 else
363 dom_p = dominated_by_p (CDI_DOMINATORS, use_bb, def_bb);
365 if (dom_p)
367 *def = adef;
368 return GRD_SINGLE_DOM;
371 /* The definition does not dominate the use. This is still OK if
372 this may be a use of a biv, i.e. if the def_bb dominates loop
373 latch. */
374 if (just_once_each_iteration_p (current_loop, def_bb))
375 return GRD_MAYBE_BIV;
377 return GRD_INVALID;
380 /* Sets IV to invariant CST in MODE. Always returns true (just for
381 consistency with other iv manipulation functions that may fail). */
383 static bool
384 iv_constant (struct rtx_iv *iv, rtx cst, enum machine_mode mode)
386 if (mode == VOIDmode)
387 mode = GET_MODE (cst);
389 iv->mode = mode;
390 iv->base = cst;
391 iv->step = const0_rtx;
392 iv->first_special = false;
393 iv->extend = UNKNOWN;
394 iv->extend_mode = iv->mode;
395 iv->delta = const0_rtx;
396 iv->mult = const1_rtx;
398 return true;
401 /* Evaluates application of subreg to MODE on IV. */
403 static bool
404 iv_subreg (struct rtx_iv *iv, enum machine_mode mode)
406 /* If iv is invariant, just calculate the new value. */
407 if (iv->step == const0_rtx
408 && !iv->first_special)
410 rtx val = get_iv_value (iv, const0_rtx);
411 val = lowpart_subreg (mode, val, iv->extend_mode);
413 iv->base = val;
414 iv->extend = UNKNOWN;
415 iv->mode = iv->extend_mode = mode;
416 iv->delta = const0_rtx;
417 iv->mult = const1_rtx;
418 return true;
421 if (iv->extend_mode == mode)
422 return true;
424 if (GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (iv->mode))
425 return false;
427 iv->extend = UNKNOWN;
428 iv->mode = mode;
430 iv->base = simplify_gen_binary (PLUS, iv->extend_mode, iv->delta,
431 simplify_gen_binary (MULT, iv->extend_mode,
432 iv->base, iv->mult));
433 iv->step = simplify_gen_binary (MULT, iv->extend_mode, iv->step, iv->mult);
434 iv->mult = const1_rtx;
435 iv->delta = const0_rtx;
436 iv->first_special = false;
438 return true;
441 /* Evaluates application of EXTEND to MODE on IV. */
443 static bool
444 iv_extend (struct rtx_iv *iv, enum rtx_code extend, enum machine_mode mode)
446 /* If iv is invariant, just calculate the new value. */
447 if (iv->step == const0_rtx
448 && !iv->first_special)
450 rtx val = get_iv_value (iv, const0_rtx);
451 val = simplify_gen_unary (extend, mode, val, iv->extend_mode);
453 iv->base = val;
454 iv->extend = UNKNOWN;
455 iv->mode = iv->extend_mode = mode;
456 iv->delta = const0_rtx;
457 iv->mult = const1_rtx;
458 return true;
461 if (mode != iv->extend_mode)
462 return false;
464 if (iv->extend != UNKNOWN
465 && iv->extend != extend)
466 return false;
468 iv->extend = extend;
470 return true;
473 /* Evaluates negation of IV. */
475 static bool
476 iv_neg (struct rtx_iv *iv)
478 if (iv->extend == UNKNOWN)
480 iv->base = simplify_gen_unary (NEG, iv->extend_mode,
481 iv->base, iv->extend_mode);
482 iv->step = simplify_gen_unary (NEG, iv->extend_mode,
483 iv->step, iv->extend_mode);
485 else
487 iv->delta = simplify_gen_unary (NEG, iv->extend_mode,
488 iv->delta, iv->extend_mode);
489 iv->mult = simplify_gen_unary (NEG, iv->extend_mode,
490 iv->mult, iv->extend_mode);
493 return true;
496 /* Evaluates addition or subtraction (according to OP) of IV1 to IV0. */
498 static bool
499 iv_add (struct rtx_iv *iv0, struct rtx_iv *iv1, enum rtx_code op)
501 enum machine_mode mode;
502 rtx arg;
504 /* Extend the constant to extend_mode of the other operand if necessary. */
505 if (iv0->extend == UNKNOWN
506 && iv0->mode == iv0->extend_mode
507 && iv0->step == const0_rtx
508 && GET_MODE_SIZE (iv0->extend_mode) < GET_MODE_SIZE (iv1->extend_mode))
510 iv0->extend_mode = iv1->extend_mode;
511 iv0->base = simplify_gen_unary (ZERO_EXTEND, iv0->extend_mode,
512 iv0->base, iv0->mode);
514 if (iv1->extend == UNKNOWN
515 && iv1->mode == iv1->extend_mode
516 && iv1->step == const0_rtx
517 && GET_MODE_SIZE (iv1->extend_mode) < GET_MODE_SIZE (iv0->extend_mode))
519 iv1->extend_mode = iv0->extend_mode;
520 iv1->base = simplify_gen_unary (ZERO_EXTEND, iv1->extend_mode,
521 iv1->base, iv1->mode);
524 mode = iv0->extend_mode;
525 if (mode != iv1->extend_mode)
526 return false;
528 if (iv0->extend == UNKNOWN && iv1->extend == UNKNOWN)
530 if (iv0->mode != iv1->mode)
531 return false;
533 iv0->base = simplify_gen_binary (op, mode, iv0->base, iv1->base);
534 iv0->step = simplify_gen_binary (op, mode, iv0->step, iv1->step);
536 return true;
539 /* Handle addition of constant. */
540 if (iv1->extend == UNKNOWN
541 && iv1->mode == mode
542 && iv1->step == const0_rtx)
544 iv0->delta = simplify_gen_binary (op, mode, iv0->delta, iv1->base);
545 return true;
548 if (iv0->extend == UNKNOWN
549 && iv0->mode == mode
550 && iv0->step == const0_rtx)
552 arg = iv0->base;
553 *iv0 = *iv1;
554 if (op == MINUS
555 && !iv_neg (iv0))
556 return false;
558 iv0->delta = simplify_gen_binary (PLUS, mode, iv0->delta, arg);
559 return true;
562 return false;
565 /* Evaluates multiplication of IV by constant CST. */
567 static bool
568 iv_mult (struct rtx_iv *iv, rtx mby)
570 enum machine_mode mode = iv->extend_mode;
572 if (GET_MODE (mby) != VOIDmode
573 && GET_MODE (mby) != mode)
574 return false;
576 if (iv->extend == UNKNOWN)
578 iv->base = simplify_gen_binary (MULT, mode, iv->base, mby);
579 iv->step = simplify_gen_binary (MULT, mode, iv->step, mby);
581 else
583 iv->delta = simplify_gen_binary (MULT, mode, iv->delta, mby);
584 iv->mult = simplify_gen_binary (MULT, mode, iv->mult, mby);
587 return true;
590 /* Evaluates shift of IV by constant CST. */
592 static bool
593 iv_shift (struct rtx_iv *iv, rtx mby)
595 enum machine_mode mode = iv->extend_mode;
597 if (GET_MODE (mby) != VOIDmode
598 && GET_MODE (mby) != mode)
599 return false;
601 if (iv->extend == UNKNOWN)
603 iv->base = simplify_gen_binary (ASHIFT, mode, iv->base, mby);
604 iv->step = simplify_gen_binary (ASHIFT, mode, iv->step, mby);
606 else
608 iv->delta = simplify_gen_binary (ASHIFT, mode, iv->delta, mby);
609 iv->mult = simplify_gen_binary (ASHIFT, mode, iv->mult, mby);
612 return true;
615 /* The recursive part of get_biv_step. Gets the value of the single value
616 defined by DEF wrto initial value of REG inside loop, in shape described
617 at get_biv_step. */
619 static bool
620 get_biv_step_1 (df_ref def, rtx reg,
621 rtx *inner_step, enum machine_mode *inner_mode,
622 enum rtx_code *extend, enum machine_mode outer_mode,
623 rtx *outer_step)
625 rtx set, rhs, op0 = NULL_RTX, op1 = NULL_RTX;
626 rtx next, nextr, tmp;
627 enum rtx_code code;
628 rtx insn = DF_REF_INSN (def);
629 df_ref next_def;
630 enum iv_grd_result res;
632 set = single_set (insn);
633 if (!set)
634 return false;
636 rhs = find_reg_equal_equiv_note (insn);
637 if (rhs)
638 rhs = XEXP (rhs, 0);
639 else
640 rhs = SET_SRC (set);
642 code = GET_CODE (rhs);
643 switch (code)
645 case SUBREG:
646 case REG:
647 next = rhs;
648 break;
650 case PLUS:
651 case MINUS:
652 op0 = XEXP (rhs, 0);
653 op1 = XEXP (rhs, 1);
655 if (code == PLUS && CONSTANT_P (op0))
657 tmp = op0; op0 = op1; op1 = tmp;
660 if (!simple_reg_p (op0)
661 || !CONSTANT_P (op1))
662 return false;
664 if (GET_MODE (rhs) != outer_mode)
666 /* ppc64 uses expressions like
668 (set x:SI (plus:SI (subreg:SI y:DI) 1)).
670 this is equivalent to
672 (set x':DI (plus:DI y:DI 1))
673 (set x:SI (subreg:SI (x':DI)). */
674 if (GET_CODE (op0) != SUBREG)
675 return false;
676 if (GET_MODE (SUBREG_REG (op0)) != outer_mode)
677 return false;
680 next = op0;
681 break;
683 case SIGN_EXTEND:
684 case ZERO_EXTEND:
685 if (GET_MODE (rhs) != outer_mode)
686 return false;
688 op0 = XEXP (rhs, 0);
689 if (!simple_reg_p (op0))
690 return false;
692 next = op0;
693 break;
695 default:
696 return false;
699 if (GET_CODE (next) == SUBREG)
701 if (!subreg_lowpart_p (next))
702 return false;
704 nextr = SUBREG_REG (next);
705 if (GET_MODE (nextr) != outer_mode)
706 return false;
708 else
709 nextr = next;
711 res = iv_get_reaching_def (insn, nextr, &next_def);
713 if (res == GRD_INVALID || res == GRD_INVARIANT)
714 return false;
716 if (res == GRD_MAYBE_BIV)
718 if (!rtx_equal_p (nextr, reg))
719 return false;
721 *inner_step = const0_rtx;
722 *extend = UNKNOWN;
723 *inner_mode = outer_mode;
724 *outer_step = const0_rtx;
726 else if (!get_biv_step_1 (next_def, reg,
727 inner_step, inner_mode, extend, outer_mode,
728 outer_step))
729 return false;
731 if (GET_CODE (next) == SUBREG)
733 enum machine_mode amode = GET_MODE (next);
735 if (GET_MODE_SIZE (amode) > GET_MODE_SIZE (*inner_mode))
736 return false;
738 *inner_mode = amode;
739 *inner_step = simplify_gen_binary (PLUS, outer_mode,
740 *inner_step, *outer_step);
741 *outer_step = const0_rtx;
742 *extend = UNKNOWN;
745 switch (code)
747 case REG:
748 case SUBREG:
749 break;
751 case PLUS:
752 case MINUS:
753 if (*inner_mode == outer_mode
754 /* See comment in previous switch. */
755 || GET_MODE (rhs) != outer_mode)
756 *inner_step = simplify_gen_binary (code, outer_mode,
757 *inner_step, op1);
758 else
759 *outer_step = simplify_gen_binary (code, outer_mode,
760 *outer_step, op1);
761 break;
763 case SIGN_EXTEND:
764 case ZERO_EXTEND:
765 gcc_assert (GET_MODE (op0) == *inner_mode
766 && *extend == UNKNOWN
767 && *outer_step == const0_rtx);
769 *extend = code;
770 break;
772 default:
773 return false;
776 return true;
779 /* Gets the operation on register REG inside loop, in shape
781 OUTER_STEP + EXTEND_{OUTER_MODE} (SUBREG_{INNER_MODE} (REG + INNER_STEP))
783 If the operation cannot be described in this shape, return false.
784 LAST_DEF is the definition of REG that dominates loop latch. */
786 static bool
787 get_biv_step (df_ref last_def, rtx reg, rtx *inner_step,
788 enum machine_mode *inner_mode, enum rtx_code *extend,
789 enum machine_mode *outer_mode, rtx *outer_step)
791 *outer_mode = GET_MODE (reg);
793 if (!get_biv_step_1 (last_def, reg,
794 inner_step, inner_mode, extend, *outer_mode,
795 outer_step))
796 return false;
798 gcc_assert ((*inner_mode == *outer_mode) != (*extend != UNKNOWN));
799 gcc_assert (*inner_mode != *outer_mode || *outer_step == const0_rtx);
801 return true;
804 /* Records information that DEF is induction variable IV. */
806 static void
807 record_iv (df_ref def, struct rtx_iv *iv)
809 struct rtx_iv *recorded_iv = XNEW (struct rtx_iv);
811 *recorded_iv = *iv;
812 check_iv_ref_table_size ();
813 DF_REF_IV_SET (def, recorded_iv);
816 /* If DEF was already analyzed for bivness, store the description of the biv to
817 IV and return true. Otherwise return false. */
819 static bool
820 analyzed_for_bivness_p (rtx def, struct rtx_iv *iv)
822 struct biv_entry *biv =
823 (struct biv_entry *) htab_find_with_hash (bivs, def, REGNO (def));
825 if (!biv)
826 return false;
828 *iv = biv->iv;
829 return true;
832 static void
833 record_biv (rtx def, struct rtx_iv *iv)
835 struct biv_entry *biv = XNEW (struct biv_entry);
836 void **slot = htab_find_slot_with_hash (bivs, def, REGNO (def), INSERT);
838 biv->regno = REGNO (def);
839 biv->iv = *iv;
840 gcc_assert (!*slot);
841 *slot = biv;
844 /* Determines whether DEF is a biv and if so, stores its description
845 to *IV. */
847 static bool
848 iv_analyze_biv (rtx def, struct rtx_iv *iv)
850 rtx inner_step, outer_step;
851 enum machine_mode inner_mode, outer_mode;
852 enum rtx_code extend;
853 df_ref last_def;
855 if (dump_file)
857 fprintf (dump_file, "Analyzing ");
858 print_rtl (dump_file, def);
859 fprintf (dump_file, " for bivness.\n");
862 if (!REG_P (def))
864 if (!CONSTANT_P (def))
865 return false;
867 return iv_constant (iv, def, VOIDmode);
870 if (!latch_dominating_def (def, &last_def))
872 if (dump_file)
873 fprintf (dump_file, " not simple.\n");
874 return false;
877 if (!last_def)
878 return iv_constant (iv, def, VOIDmode);
880 if (analyzed_for_bivness_p (def, iv))
882 if (dump_file)
883 fprintf (dump_file, " already analysed.\n");
884 return iv->base != NULL_RTX;
887 if (!get_biv_step (last_def, def, &inner_step, &inner_mode, &extend,
888 &outer_mode, &outer_step))
890 iv->base = NULL_RTX;
891 goto end;
894 /* Loop transforms base to es (base + inner_step) + outer_step,
895 where es means extend of subreg between inner_mode and outer_mode.
896 The corresponding induction variable is
898 es ((base - outer_step) + i * (inner_step + outer_step)) + outer_step */
900 iv->base = simplify_gen_binary (MINUS, outer_mode, def, outer_step);
901 iv->step = simplify_gen_binary (PLUS, outer_mode, inner_step, outer_step);
902 iv->mode = inner_mode;
903 iv->extend_mode = outer_mode;
904 iv->extend = extend;
905 iv->mult = const1_rtx;
906 iv->delta = outer_step;
907 iv->first_special = inner_mode != outer_mode;
909 end:
910 if (dump_file)
912 fprintf (dump_file, " ");
913 dump_iv_info (dump_file, iv);
914 fprintf (dump_file, "\n");
917 record_biv (def, iv);
918 return iv->base != NULL_RTX;
921 /* Analyzes expression RHS used at INSN and stores the result to *IV.
922 The mode of the induction variable is MODE. */
924 bool
925 iv_analyze_expr (rtx insn, rtx rhs, enum machine_mode mode, struct rtx_iv *iv)
927 rtx mby = NULL_RTX, tmp;
928 rtx op0 = NULL_RTX, op1 = NULL_RTX;
929 struct rtx_iv iv0, iv1;
930 enum rtx_code code = GET_CODE (rhs);
931 enum machine_mode omode = mode;
933 iv->mode = VOIDmode;
934 iv->base = NULL_RTX;
935 iv->step = NULL_RTX;
937 gcc_assert (GET_MODE (rhs) == mode || GET_MODE (rhs) == VOIDmode);
939 if (CONSTANT_P (rhs)
940 || REG_P (rhs)
941 || code == SUBREG)
943 if (!iv_analyze_op (insn, rhs, iv))
944 return false;
946 if (iv->mode == VOIDmode)
948 iv->mode = mode;
949 iv->extend_mode = mode;
952 return true;
955 switch (code)
957 case REG:
958 op0 = rhs;
959 break;
961 case SIGN_EXTEND:
962 case ZERO_EXTEND:
963 case NEG:
964 op0 = XEXP (rhs, 0);
965 omode = GET_MODE (op0);
966 break;
968 case PLUS:
969 case MINUS:
970 op0 = XEXP (rhs, 0);
971 op1 = XEXP (rhs, 1);
972 break;
974 case MULT:
975 op0 = XEXP (rhs, 0);
976 mby = XEXP (rhs, 1);
977 if (!CONSTANT_P (mby))
979 tmp = op0;
980 op0 = mby;
981 mby = tmp;
983 if (!CONSTANT_P (mby))
984 return false;
985 break;
987 case ASHIFT:
988 op0 = XEXP (rhs, 0);
989 mby = XEXP (rhs, 1);
990 if (!CONSTANT_P (mby))
991 return false;
992 break;
994 default:
995 return false;
998 if (op0
999 && !iv_analyze_expr (insn, op0, omode, &iv0))
1000 return false;
1002 if (op1
1003 && !iv_analyze_expr (insn, op1, omode, &iv1))
1004 return false;
1006 switch (code)
1008 case SIGN_EXTEND:
1009 case ZERO_EXTEND:
1010 if (!iv_extend (&iv0, code, mode))
1011 return false;
1012 break;
1014 case NEG:
1015 if (!iv_neg (&iv0))
1016 return false;
1017 break;
1019 case PLUS:
1020 case MINUS:
1021 if (!iv_add (&iv0, &iv1, code))
1022 return false;
1023 break;
1025 case MULT:
1026 if (!iv_mult (&iv0, mby))
1027 return false;
1028 break;
1030 case ASHIFT:
1031 if (!iv_shift (&iv0, mby))
1032 return false;
1033 break;
1035 default:
1036 break;
1039 *iv = iv0;
1040 return iv->base != NULL_RTX;
1043 /* Analyzes iv DEF and stores the result to *IV. */
1045 static bool
1046 iv_analyze_def (df_ref def, struct rtx_iv *iv)
1048 rtx insn = DF_REF_INSN (def);
1049 rtx reg = DF_REF_REG (def);
1050 rtx set, rhs;
1052 if (dump_file)
1054 fprintf (dump_file, "Analyzing def of ");
1055 print_rtl (dump_file, reg);
1056 fprintf (dump_file, " in insn ");
1057 print_rtl_single (dump_file, insn);
1060 check_iv_ref_table_size ();
1061 if (DF_REF_IV (def))
1063 if (dump_file)
1064 fprintf (dump_file, " already analysed.\n");
1065 *iv = *DF_REF_IV (def);
1066 return iv->base != NULL_RTX;
1069 iv->mode = VOIDmode;
1070 iv->base = NULL_RTX;
1071 iv->step = NULL_RTX;
1073 if (!REG_P (reg))
1074 return false;
1076 set = single_set (insn);
1077 if (!set)
1078 return false;
1080 if (!REG_P (SET_DEST (set)))
1081 return false;
1083 gcc_assert (SET_DEST (set) == reg);
1084 rhs = find_reg_equal_equiv_note (insn);
1085 if (rhs)
1086 rhs = XEXP (rhs, 0);
1087 else
1088 rhs = SET_SRC (set);
1090 iv_analyze_expr (insn, rhs, GET_MODE (reg), iv);
1091 record_iv (def, iv);
1093 if (dump_file)
1095 print_rtl (dump_file, reg);
1096 fprintf (dump_file, " in insn ");
1097 print_rtl_single (dump_file, insn);
1098 fprintf (dump_file, " is ");
1099 dump_iv_info (dump_file, iv);
1100 fprintf (dump_file, "\n");
1103 return iv->base != NULL_RTX;
1106 /* Analyzes operand OP of INSN and stores the result to *IV. */
1108 static bool
1109 iv_analyze_op (rtx insn, rtx op, struct rtx_iv *iv)
1111 df_ref def = NULL;
1112 enum iv_grd_result res;
1114 if (dump_file)
1116 fprintf (dump_file, "Analyzing operand ");
1117 print_rtl (dump_file, op);
1118 fprintf (dump_file, " of insn ");
1119 print_rtl_single (dump_file, insn);
1122 if (CONSTANT_P (op))
1123 res = GRD_INVARIANT;
1124 else if (GET_CODE (op) == SUBREG)
1126 if (!subreg_lowpart_p (op))
1127 return false;
1129 if (!iv_analyze_op (insn, SUBREG_REG (op), iv))
1130 return false;
1132 return iv_subreg (iv, GET_MODE (op));
1134 else
1136 res = iv_get_reaching_def (insn, op, &def);
1137 if (res == GRD_INVALID)
1139 if (dump_file)
1140 fprintf (dump_file, " not simple.\n");
1141 return false;
1145 if (res == GRD_INVARIANT)
1147 iv_constant (iv, op, VOIDmode);
1149 if (dump_file)
1151 fprintf (dump_file, " ");
1152 dump_iv_info (dump_file, iv);
1153 fprintf (dump_file, "\n");
1155 return true;
1158 if (res == GRD_MAYBE_BIV)
1159 return iv_analyze_biv (op, iv);
1161 return iv_analyze_def (def, iv);
1164 /* Analyzes value VAL at INSN and stores the result to *IV. */
1166 bool
1167 iv_analyze (rtx insn, rtx val, struct rtx_iv *iv)
1169 rtx reg;
1171 /* We must find the insn in that val is used, so that we get to UD chains.
1172 Since the function is sometimes called on result of get_condition,
1173 this does not necessarily have to be directly INSN; scan also the
1174 following insns. */
1175 if (simple_reg_p (val))
1177 if (GET_CODE (val) == SUBREG)
1178 reg = SUBREG_REG (val);
1179 else
1180 reg = val;
1182 while (!df_find_use (insn, reg))
1183 insn = NEXT_INSN (insn);
1186 return iv_analyze_op (insn, val, iv);
1189 /* Analyzes definition of DEF in INSN and stores the result to IV. */
1191 bool
1192 iv_analyze_result (rtx insn, rtx def, struct rtx_iv *iv)
1194 df_ref adef;
1196 adef = df_find_def (insn, def);
1197 if (!adef)
1198 return false;
1200 return iv_analyze_def (adef, iv);
1203 /* Checks whether definition of register REG in INSN is a basic induction
1204 variable. IV analysis must have been initialized (via a call to
1205 iv_analysis_loop_init) for this function to produce a result. */
1207 bool
1208 biv_p (rtx insn, rtx reg)
1210 struct rtx_iv iv;
1211 df_ref def, last_def;
1213 if (!simple_reg_p (reg))
1214 return false;
1216 def = df_find_def (insn, reg);
1217 gcc_assert (def != NULL);
1218 if (!latch_dominating_def (reg, &last_def))
1219 return false;
1220 if (last_def != def)
1221 return false;
1223 if (!iv_analyze_biv (reg, &iv))
1224 return false;
1226 return iv.step != const0_rtx;
1229 /* Calculates value of IV at ITERATION-th iteration. */
1232 get_iv_value (struct rtx_iv *iv, rtx iteration)
1234 rtx val;
1236 /* We would need to generate some if_then_else patterns, and so far
1237 it is not needed anywhere. */
1238 gcc_assert (!iv->first_special);
1240 if (iv->step != const0_rtx && iteration != const0_rtx)
1241 val = simplify_gen_binary (PLUS, iv->extend_mode, iv->base,
1242 simplify_gen_binary (MULT, iv->extend_mode,
1243 iv->step, iteration));
1244 else
1245 val = iv->base;
1247 if (iv->extend_mode == iv->mode)
1248 return val;
1250 val = lowpart_subreg (iv->mode, val, iv->extend_mode);
1252 if (iv->extend == UNKNOWN)
1253 return val;
1255 val = simplify_gen_unary (iv->extend, iv->extend_mode, val, iv->mode);
1256 val = simplify_gen_binary (PLUS, iv->extend_mode, iv->delta,
1257 simplify_gen_binary (MULT, iv->extend_mode,
1258 iv->mult, val));
1260 return val;
1263 /* Free the data for an induction variable analysis. */
1265 void
1266 iv_analysis_done (void)
1268 if (!clean_slate)
1270 clear_iv_info ();
1271 clean_slate = true;
1272 df_finish_pass (true);
1273 htab_delete (bivs);
1274 free (iv_ref_table);
1275 iv_ref_table = NULL;
1276 iv_ref_table_size = 0;
1277 bivs = NULL;
1281 /* Computes inverse to X modulo (1 << MOD). */
1283 static unsigned HOST_WIDEST_INT
1284 inverse (unsigned HOST_WIDEST_INT x, int mod)
1286 unsigned HOST_WIDEST_INT mask =
1287 ((unsigned HOST_WIDEST_INT) 1 << (mod - 1) << 1) - 1;
1288 unsigned HOST_WIDEST_INT rslt = 1;
1289 int i;
1291 for (i = 0; i < mod - 1; i++)
1293 rslt = (rslt * x) & mask;
1294 x = (x * x) & mask;
1297 return rslt;
1300 /* Checks whether register *REG is in set ALT. Callback for for_each_rtx. */
1302 static int
1303 altered_reg_used (rtx *reg, void *alt)
1305 if (!REG_P (*reg))
1306 return 0;
1308 return REGNO_REG_SET_P ((bitmap) alt, REGNO (*reg));
1311 /* Marks registers altered by EXPR in set ALT. */
1313 static void
1314 mark_altered (rtx expr, const_rtx by ATTRIBUTE_UNUSED, void *alt)
1316 if (GET_CODE (expr) == SUBREG)
1317 expr = SUBREG_REG (expr);
1318 if (!REG_P (expr))
1319 return;
1321 SET_REGNO_REG_SET ((bitmap) alt, REGNO (expr));
1324 /* Checks whether RHS is simple enough to process. */
1326 static bool
1327 simple_rhs_p (rtx rhs)
1329 rtx op0, op1;
1331 if (CONSTANT_P (rhs)
1332 || (REG_P (rhs) && !HARD_REGISTER_P (rhs)))
1333 return true;
1335 switch (GET_CODE (rhs))
1337 case PLUS:
1338 case MINUS:
1339 case AND:
1340 op0 = XEXP (rhs, 0);
1341 op1 = XEXP (rhs, 1);
1342 /* Allow reg OP const and reg OP reg. */
1343 if (!(REG_P (op0) && !HARD_REGISTER_P (op0))
1344 && !CONSTANT_P (op0))
1345 return false;
1346 if (!(REG_P (op1) && !HARD_REGISTER_P (op1))
1347 && !CONSTANT_P (op1))
1348 return false;
1350 return true;
1352 case ASHIFT:
1353 case ASHIFTRT:
1354 case LSHIFTRT:
1355 case MULT:
1356 op0 = XEXP (rhs, 0);
1357 op1 = XEXP (rhs, 1);
1358 /* Allow reg OP const. */
1359 if (!(REG_P (op0) && !HARD_REGISTER_P (op0)))
1360 return false;
1361 if (!CONSTANT_P (op1))
1362 return false;
1364 return true;
1366 default:
1367 return false;
1371 /* If REG has a single definition, replace it with its known value in EXPR.
1372 Callback for for_each_rtx. */
1374 static int
1375 replace_single_def_regs (rtx *reg, void *expr1)
1377 unsigned regno;
1378 df_ref adef;
1379 rtx set;
1380 rtx *expr = (rtx *)expr1;
1382 if (!REG_P (*reg))
1383 return 0;
1385 regno = REGNO (*reg);
1386 adef = DF_REG_DEF_CHAIN (regno);
1387 if (adef == NULL || DF_REF_NEXT_REG (adef) != NULL
1388 || DF_REF_IS_ARTIFICIAL (adef))
1389 return -1;
1391 set = single_set (DF_REF_INSN (adef));
1392 if (set == NULL || SET_DEST (set) != *reg || !CONSTANT_P (SET_SRC (set)))
1393 return -1;
1395 *expr = simplify_replace_rtx (*expr, *reg, SET_SRC (set));
1396 return 1;
1399 /* A subroutine of simplify_using_initial_values, this function examines INSN
1400 to see if it contains a suitable set that we can use to make a replacement.
1401 If it is suitable, return true and set DEST and SRC to the lhs and rhs of
1402 the set; return false otherwise. */
1404 static bool
1405 suitable_set_for_replacement (rtx insn, rtx *dest, rtx *src)
1407 rtx set = single_set (insn);
1408 rtx lhs = NULL_RTX, rhs;
1410 if (!set)
1411 return false;
1413 lhs = SET_DEST (set);
1414 if (!REG_P (lhs))
1415 return false;
1417 rhs = find_reg_equal_equiv_note (insn);
1418 if (rhs)
1419 rhs = XEXP (rhs, 0);
1420 else
1421 rhs = SET_SRC (set);
1423 if (!simple_rhs_p (rhs))
1424 return false;
1426 *dest = lhs;
1427 *src = rhs;
1428 return true;
1431 /* Using the data returned by suitable_set_for_replacement, replace DEST
1432 with SRC in *EXPR and return the new expression. Also call
1433 replace_single_def_regs if the replacement changed something. */
1434 static void
1435 replace_in_expr (rtx *expr, rtx dest, rtx src)
1437 rtx old = *expr;
1438 *expr = simplify_replace_rtx (*expr, dest, src);
1439 if (old == *expr)
1440 return;
1441 while (for_each_rtx (expr, replace_single_def_regs, expr) != 0)
1442 continue;
1445 /* Checks whether A implies B. */
1447 static bool
1448 implies_p (rtx a, rtx b)
1450 rtx op0, op1, opb0, opb1, r;
1451 enum machine_mode mode;
1453 if (GET_CODE (a) == EQ)
1455 op0 = XEXP (a, 0);
1456 op1 = XEXP (a, 1);
1458 if (REG_P (op0))
1460 r = simplify_replace_rtx (b, op0, op1);
1461 if (r == const_true_rtx)
1462 return true;
1465 if (REG_P (op1))
1467 r = simplify_replace_rtx (b, op1, op0);
1468 if (r == const_true_rtx)
1469 return true;
1473 if (b == const_true_rtx)
1474 return true;
1476 if ((GET_RTX_CLASS (GET_CODE (a)) != RTX_COMM_COMPARE
1477 && GET_RTX_CLASS (GET_CODE (a)) != RTX_COMPARE)
1478 || (GET_RTX_CLASS (GET_CODE (b)) != RTX_COMM_COMPARE
1479 && GET_RTX_CLASS (GET_CODE (b)) != RTX_COMPARE))
1480 return false;
1482 op0 = XEXP (a, 0);
1483 op1 = XEXP (a, 1);
1484 opb0 = XEXP (b, 0);
1485 opb1 = XEXP (b, 1);
1487 mode = GET_MODE (op0);
1488 if (mode != GET_MODE (opb0))
1489 mode = VOIDmode;
1490 else if (mode == VOIDmode)
1492 mode = GET_MODE (op1);
1493 if (mode != GET_MODE (opb1))
1494 mode = VOIDmode;
1497 /* A < B implies A + 1 <= B. */
1498 if ((GET_CODE (a) == GT || GET_CODE (a) == LT)
1499 && (GET_CODE (b) == GE || GET_CODE (b) == LE))
1502 if (GET_CODE (a) == GT)
1504 r = op0;
1505 op0 = op1;
1506 op1 = r;
1509 if (GET_CODE (b) == GE)
1511 r = opb0;
1512 opb0 = opb1;
1513 opb1 = r;
1516 if (SCALAR_INT_MODE_P (mode)
1517 && rtx_equal_p (op1, opb1)
1518 && simplify_gen_binary (MINUS, mode, opb0, op0) == const1_rtx)
1519 return true;
1520 return false;
1523 /* A < B or A > B imply A != B. TODO: Likewise
1524 A + n < B implies A != B + n if neither wraps. */
1525 if (GET_CODE (b) == NE
1526 && (GET_CODE (a) == GT || GET_CODE (a) == GTU
1527 || GET_CODE (a) == LT || GET_CODE (a) == LTU))
1529 if (rtx_equal_p (op0, opb0)
1530 && rtx_equal_p (op1, opb1))
1531 return true;
1534 /* For unsigned comparisons, A != 0 implies A > 0 and A >= 1. */
1535 if (GET_CODE (a) == NE
1536 && op1 == const0_rtx)
1538 if ((GET_CODE (b) == GTU
1539 && opb1 == const0_rtx)
1540 || (GET_CODE (b) == GEU
1541 && opb1 == const1_rtx))
1542 return rtx_equal_p (op0, opb0);
1545 /* A != N is equivalent to A - (N + 1) <u -1. */
1546 if (GET_CODE (a) == NE
1547 && GET_CODE (op1) == CONST_INT
1548 && GET_CODE (b) == LTU
1549 && opb1 == constm1_rtx
1550 && GET_CODE (opb0) == PLUS
1551 && GET_CODE (XEXP (opb0, 1)) == CONST_INT
1552 /* Avoid overflows. */
1553 && ((unsigned HOST_WIDE_INT) INTVAL (XEXP (opb0, 1))
1554 != ((unsigned HOST_WIDE_INT)1
1555 << (HOST_BITS_PER_WIDE_INT - 1)) - 1)
1556 && INTVAL (XEXP (opb0, 1)) + 1 == -INTVAL (op1))
1557 return rtx_equal_p (op0, XEXP (opb0, 0));
1559 /* Likewise, A != N implies A - N > 0. */
1560 if (GET_CODE (a) == NE
1561 && GET_CODE (op1) == CONST_INT)
1563 if (GET_CODE (b) == GTU
1564 && GET_CODE (opb0) == PLUS
1565 && opb1 == const0_rtx
1566 && GET_CODE (XEXP (opb0, 1)) == CONST_INT
1567 /* Avoid overflows. */
1568 && ((unsigned HOST_WIDE_INT) INTVAL (XEXP (opb0, 1))
1569 != ((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1)))
1570 && rtx_equal_p (XEXP (opb0, 0), op0))
1571 return INTVAL (op1) == -INTVAL (XEXP (opb0, 1));
1572 if (GET_CODE (b) == GEU
1573 && GET_CODE (opb0) == PLUS
1574 && opb1 == const1_rtx
1575 && GET_CODE (XEXP (opb0, 1)) == CONST_INT
1576 /* Avoid overflows. */
1577 && ((unsigned HOST_WIDE_INT) INTVAL (XEXP (opb0, 1))
1578 != ((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1)))
1579 && rtx_equal_p (XEXP (opb0, 0), op0))
1580 return INTVAL (op1) == -INTVAL (XEXP (opb0, 1));
1583 /* A >s X, where X is positive, implies A <u Y, if Y is negative. */
1584 if ((GET_CODE (a) == GT || GET_CODE (a) == GE)
1585 && GET_CODE (op1) == CONST_INT
1586 && ((GET_CODE (a) == GT && op1 == constm1_rtx)
1587 || INTVAL (op1) >= 0)
1588 && GET_CODE (b) == LTU
1589 && GET_CODE (opb1) == CONST_INT
1590 && rtx_equal_p (op0, opb0))
1591 return INTVAL (opb1) < 0;
1593 return false;
1596 /* Canonicalizes COND so that
1598 (1) Ensure that operands are ordered according to
1599 swap_commutative_operands_p.
1600 (2) (LE x const) will be replaced with (LT x <const+1>) and similarly
1601 for GE, GEU, and LEU. */
1604 canon_condition (rtx cond)
1606 rtx tem;
1607 rtx op0, op1;
1608 enum rtx_code code;
1609 enum machine_mode mode;
1611 code = GET_CODE (cond);
1612 op0 = XEXP (cond, 0);
1613 op1 = XEXP (cond, 1);
1615 if (swap_commutative_operands_p (op0, op1))
1617 code = swap_condition (code);
1618 tem = op0;
1619 op0 = op1;
1620 op1 = tem;
1623 mode = GET_MODE (op0);
1624 if (mode == VOIDmode)
1625 mode = GET_MODE (op1);
1626 gcc_assert (mode != VOIDmode);
1628 if (GET_CODE (op1) == CONST_INT
1629 && GET_MODE_CLASS (mode) != MODE_CC
1630 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1632 HOST_WIDE_INT const_val = INTVAL (op1);
1633 unsigned HOST_WIDE_INT uconst_val = const_val;
1634 unsigned HOST_WIDE_INT max_val
1635 = (unsigned HOST_WIDE_INT) GET_MODE_MASK (mode);
1637 switch (code)
1639 case LE:
1640 if ((unsigned HOST_WIDE_INT) const_val != max_val >> 1)
1641 code = LT, op1 = gen_int_mode (const_val + 1, GET_MODE (op0));
1642 break;
1644 /* When cross-compiling, const_val might be sign-extended from
1645 BITS_PER_WORD to HOST_BITS_PER_WIDE_INT */
1646 case GE:
1647 if ((HOST_WIDE_INT) (const_val & max_val)
1648 != (((HOST_WIDE_INT) 1
1649 << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1))))
1650 code = GT, op1 = gen_int_mode (const_val - 1, mode);
1651 break;
1653 case LEU:
1654 if (uconst_val < max_val)
1655 code = LTU, op1 = gen_int_mode (uconst_val + 1, mode);
1656 break;
1658 case GEU:
1659 if (uconst_val != 0)
1660 code = GTU, op1 = gen_int_mode (uconst_val - 1, mode);
1661 break;
1663 default:
1664 break;
1668 if (op0 != XEXP (cond, 0)
1669 || op1 != XEXP (cond, 1)
1670 || code != GET_CODE (cond)
1671 || GET_MODE (cond) != SImode)
1672 cond = gen_rtx_fmt_ee (code, SImode, op0, op1);
1674 return cond;
1677 /* Tries to use the fact that COND holds to simplify EXPR. ALTERED is the
1678 set of altered regs. */
1680 void
1681 simplify_using_condition (rtx cond, rtx *expr, regset altered)
1683 rtx rev, reve, exp = *expr;
1685 /* If some register gets altered later, we do not really speak about its
1686 value at the time of comparison. */
1687 if (altered
1688 && for_each_rtx (&cond, altered_reg_used, altered))
1689 return;
1691 if (GET_CODE (cond) == EQ
1692 && REG_P (XEXP (cond, 0)) && CONSTANT_P (XEXP (cond, 1)))
1694 *expr = simplify_replace_rtx (*expr, XEXP (cond, 0), XEXP (cond, 1));
1695 return;
1698 if (!COMPARISON_P (exp))
1699 return;
1701 rev = reversed_condition (cond);
1702 reve = reversed_condition (exp);
1704 cond = canon_condition (cond);
1705 exp = canon_condition (exp);
1706 if (rev)
1707 rev = canon_condition (rev);
1708 if (reve)
1709 reve = canon_condition (reve);
1711 if (rtx_equal_p (exp, cond))
1713 *expr = const_true_rtx;
1714 return;
1717 if (rev && rtx_equal_p (exp, rev))
1719 *expr = const0_rtx;
1720 return;
1723 if (implies_p (cond, exp))
1725 *expr = const_true_rtx;
1726 return;
1729 if (reve && implies_p (cond, reve))
1731 *expr = const0_rtx;
1732 return;
1735 /* A proof by contradiction. If *EXPR implies (not cond), *EXPR must
1736 be false. */
1737 if (rev && implies_p (exp, rev))
1739 *expr = const0_rtx;
1740 return;
1743 /* Similarly, If (not *EXPR) implies (not cond), *EXPR must be true. */
1744 if (rev && reve && implies_p (reve, rev))
1746 *expr = const_true_rtx;
1747 return;
1750 /* We would like to have some other tests here. TODO. */
1752 return;
1755 /* Use relationship between A and *B to eventually eliminate *B.
1756 OP is the operation we consider. */
1758 static void
1759 eliminate_implied_condition (enum rtx_code op, rtx a, rtx *b)
1761 switch (op)
1763 case AND:
1764 /* If A implies *B, we may replace *B by true. */
1765 if (implies_p (a, *b))
1766 *b = const_true_rtx;
1767 break;
1769 case IOR:
1770 /* If *B implies A, we may replace *B by false. */
1771 if (implies_p (*b, a))
1772 *b = const0_rtx;
1773 break;
1775 default:
1776 gcc_unreachable ();
1780 /* Eliminates the conditions in TAIL that are implied by HEAD. OP is the
1781 operation we consider. */
1783 static void
1784 eliminate_implied_conditions (enum rtx_code op, rtx *head, rtx tail)
1786 rtx elt;
1788 for (elt = tail; elt; elt = XEXP (elt, 1))
1789 eliminate_implied_condition (op, *head, &XEXP (elt, 0));
1790 for (elt = tail; elt; elt = XEXP (elt, 1))
1791 eliminate_implied_condition (op, XEXP (elt, 0), head);
1794 /* Simplifies *EXPR using initial values at the start of the LOOP. If *EXPR
1795 is a list, its elements are assumed to be combined using OP. */
1797 static void
1798 simplify_using_initial_values (struct loop *loop, enum rtx_code op, rtx *expr)
1800 bool expression_valid;
1801 rtx head, tail, insn, cond_list, last_valid_expr;
1802 rtx neutral, aggr;
1803 regset altered, this_altered;
1804 edge e;
1806 if (!*expr)
1807 return;
1809 if (CONSTANT_P (*expr))
1810 return;
1812 if (GET_CODE (*expr) == EXPR_LIST)
1814 head = XEXP (*expr, 0);
1815 tail = XEXP (*expr, 1);
1817 eliminate_implied_conditions (op, &head, tail);
1819 switch (op)
1821 case AND:
1822 neutral = const_true_rtx;
1823 aggr = const0_rtx;
1824 break;
1826 case IOR:
1827 neutral = const0_rtx;
1828 aggr = const_true_rtx;
1829 break;
1831 default:
1832 gcc_unreachable ();
1835 simplify_using_initial_values (loop, UNKNOWN, &head);
1836 if (head == aggr)
1838 XEXP (*expr, 0) = aggr;
1839 XEXP (*expr, 1) = NULL_RTX;
1840 return;
1842 else if (head == neutral)
1844 *expr = tail;
1845 simplify_using_initial_values (loop, op, expr);
1846 return;
1848 simplify_using_initial_values (loop, op, &tail);
1850 if (tail && XEXP (tail, 0) == aggr)
1852 *expr = tail;
1853 return;
1856 XEXP (*expr, 0) = head;
1857 XEXP (*expr, 1) = tail;
1858 return;
1861 gcc_assert (op == UNKNOWN);
1863 for (;;)
1864 if (for_each_rtx (expr, replace_single_def_regs, expr) == 0)
1865 break;
1866 if (CONSTANT_P (*expr))
1867 return;
1869 e = loop_preheader_edge (loop);
1870 if (e->src == ENTRY_BLOCK_PTR)
1871 return;
1873 altered = ALLOC_REG_SET (&reg_obstack);
1874 this_altered = ALLOC_REG_SET (&reg_obstack);
1876 expression_valid = true;
1877 last_valid_expr = *expr;
1878 cond_list = NULL_RTX;
1879 while (1)
1881 insn = BB_END (e->src);
1882 if (any_condjump_p (insn))
1884 rtx cond = get_condition (BB_END (e->src), NULL, false, true);
1886 if (cond && (e->flags & EDGE_FALLTHRU))
1887 cond = reversed_condition (cond);
1888 if (cond)
1890 rtx old = *expr;
1891 simplify_using_condition (cond, expr, altered);
1892 if (old != *expr)
1894 rtx note;
1895 if (CONSTANT_P (*expr))
1896 goto out;
1897 for (note = cond_list; note; note = XEXP (note, 1))
1899 simplify_using_condition (XEXP (note, 0), expr, altered);
1900 if (CONSTANT_P (*expr))
1901 goto out;
1904 cond_list = alloc_EXPR_LIST (0, cond, cond_list);
1908 FOR_BB_INSNS_REVERSE (e->src, insn)
1910 rtx src, dest;
1911 rtx old = *expr;
1913 if (!INSN_P (insn))
1914 continue;
1916 CLEAR_REG_SET (this_altered);
1917 note_stores (PATTERN (insn), mark_altered, this_altered);
1918 if (CALL_P (insn))
1920 int i;
1922 /* Kill all call clobbered registers. */
1923 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1924 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
1925 SET_REGNO_REG_SET (this_altered, i);
1928 if (suitable_set_for_replacement (insn, &dest, &src))
1930 rtx *pnote, *pnote_next;
1932 replace_in_expr (expr, dest, src);
1933 if (CONSTANT_P (*expr))
1934 goto out;
1936 for (pnote = &cond_list; *pnote; pnote = pnote_next)
1938 rtx note = *pnote;
1939 rtx old_cond = XEXP (note, 0);
1941 pnote_next = &XEXP (note, 1);
1942 replace_in_expr (&XEXP (note, 0), dest, src);
1944 /* We can no longer use a condition that has been simplified
1945 to a constant, and simplify_using_condition will abort if
1946 we try. */
1947 if (CONSTANT_P (XEXP (note, 0)))
1949 *pnote = *pnote_next;
1950 pnote_next = pnote;
1951 free_EXPR_LIST_node (note);
1953 /* Retry simplifications with this condition if either the
1954 expression or the condition changed. */
1955 else if (old_cond != XEXP (note, 0) || old != *expr)
1956 simplify_using_condition (XEXP (note, 0), expr, altered);
1959 else
1960 /* If we did not use this insn to make a replacement, any overlap
1961 between stores in this insn and our expression will cause the
1962 expression to become invalid. */
1963 if (for_each_rtx (expr, altered_reg_used, this_altered))
1964 goto out;
1966 if (CONSTANT_P (*expr))
1967 goto out;
1969 IOR_REG_SET (altered, this_altered);
1971 /* If the expression now contains regs that have been altered, we
1972 can't return it to the caller. However, it is still valid for
1973 further simplification, so keep searching to see if we can
1974 eventually turn it into a constant. */
1975 if (for_each_rtx (expr, altered_reg_used, altered))
1976 expression_valid = false;
1977 if (expression_valid)
1978 last_valid_expr = *expr;
1981 if (!single_pred_p (e->src)
1982 || single_pred (e->src) == ENTRY_BLOCK_PTR)
1983 break;
1984 e = single_pred_edge (e->src);
1987 out:
1988 free_EXPR_LIST_list (&cond_list);
1989 if (!CONSTANT_P (*expr))
1990 *expr = last_valid_expr;
1991 FREE_REG_SET (altered);
1992 FREE_REG_SET (this_altered);
1995 /* Transforms invariant IV into MODE. Adds assumptions based on the fact
1996 that IV occurs as left operands of comparison COND and its signedness
1997 is SIGNED_P to DESC. */
1999 static void
2000 shorten_into_mode (struct rtx_iv *iv, enum machine_mode mode,
2001 enum rtx_code cond, bool signed_p, struct niter_desc *desc)
2003 rtx mmin, mmax, cond_over, cond_under;
2005 get_mode_bounds (mode, signed_p, iv->extend_mode, &mmin, &mmax);
2006 cond_under = simplify_gen_relational (LT, SImode, iv->extend_mode,
2007 iv->base, mmin);
2008 cond_over = simplify_gen_relational (GT, SImode, iv->extend_mode,
2009 iv->base, mmax);
2011 switch (cond)
2013 case LE:
2014 case LT:
2015 case LEU:
2016 case LTU:
2017 if (cond_under != const0_rtx)
2018 desc->infinite =
2019 alloc_EXPR_LIST (0, cond_under, desc->infinite);
2020 if (cond_over != const0_rtx)
2021 desc->noloop_assumptions =
2022 alloc_EXPR_LIST (0, cond_over, desc->noloop_assumptions);
2023 break;
2025 case GE:
2026 case GT:
2027 case GEU:
2028 case GTU:
2029 if (cond_over != const0_rtx)
2030 desc->infinite =
2031 alloc_EXPR_LIST (0, cond_over, desc->infinite);
2032 if (cond_under != const0_rtx)
2033 desc->noloop_assumptions =
2034 alloc_EXPR_LIST (0, cond_under, desc->noloop_assumptions);
2035 break;
2037 case NE:
2038 if (cond_over != const0_rtx)
2039 desc->infinite =
2040 alloc_EXPR_LIST (0, cond_over, desc->infinite);
2041 if (cond_under != const0_rtx)
2042 desc->infinite =
2043 alloc_EXPR_LIST (0, cond_under, desc->infinite);
2044 break;
2046 default:
2047 gcc_unreachable ();
2050 iv->mode = mode;
2051 iv->extend = signed_p ? SIGN_EXTEND : ZERO_EXTEND;
2054 /* Transforms IV0 and IV1 compared by COND so that they are both compared as
2055 subregs of the same mode if possible (sometimes it is necessary to add
2056 some assumptions to DESC). */
2058 static bool
2059 canonicalize_iv_subregs (struct rtx_iv *iv0, struct rtx_iv *iv1,
2060 enum rtx_code cond, struct niter_desc *desc)
2062 enum machine_mode comp_mode;
2063 bool signed_p;
2065 /* If the ivs behave specially in the first iteration, or are
2066 added/multiplied after extending, we ignore them. */
2067 if (iv0->first_special || iv0->mult != const1_rtx || iv0->delta != const0_rtx)
2068 return false;
2069 if (iv1->first_special || iv1->mult != const1_rtx || iv1->delta != const0_rtx)
2070 return false;
2072 /* If there is some extend, it must match signedness of the comparison. */
2073 switch (cond)
2075 case LE:
2076 case LT:
2077 if (iv0->extend == ZERO_EXTEND
2078 || iv1->extend == ZERO_EXTEND)
2079 return false;
2080 signed_p = true;
2081 break;
2083 case LEU:
2084 case LTU:
2085 if (iv0->extend == SIGN_EXTEND
2086 || iv1->extend == SIGN_EXTEND)
2087 return false;
2088 signed_p = false;
2089 break;
2091 case NE:
2092 if (iv0->extend != UNKNOWN
2093 && iv1->extend != UNKNOWN
2094 && iv0->extend != iv1->extend)
2095 return false;
2097 signed_p = false;
2098 if (iv0->extend != UNKNOWN)
2099 signed_p = iv0->extend == SIGN_EXTEND;
2100 if (iv1->extend != UNKNOWN)
2101 signed_p = iv1->extend == SIGN_EXTEND;
2102 break;
2104 default:
2105 gcc_unreachable ();
2108 /* Values of both variables should be computed in the same mode. These
2109 might indeed be different, if we have comparison like
2111 (compare (subreg:SI (iv0)) (subreg:SI (iv1)))
2113 and iv0 and iv1 are both ivs iterating in SI mode, but calculated
2114 in different modes. This does not seem impossible to handle, but
2115 it hardly ever occurs in practice.
2117 The only exception is the case when one of operands is invariant.
2118 For example pentium 3 generates comparisons like
2119 (lt (subreg:HI (reg:SI)) 100). Here we assign HImode to 100, but we
2120 definitely do not want this prevent the optimization. */
2121 comp_mode = iv0->extend_mode;
2122 if (GET_MODE_BITSIZE (comp_mode) < GET_MODE_BITSIZE (iv1->extend_mode))
2123 comp_mode = iv1->extend_mode;
2125 if (iv0->extend_mode != comp_mode)
2127 if (iv0->mode != iv0->extend_mode
2128 || iv0->step != const0_rtx)
2129 return false;
2131 iv0->base = simplify_gen_unary (signed_p ? SIGN_EXTEND : ZERO_EXTEND,
2132 comp_mode, iv0->base, iv0->mode);
2133 iv0->extend_mode = comp_mode;
2136 if (iv1->extend_mode != comp_mode)
2138 if (iv1->mode != iv1->extend_mode
2139 || iv1->step != const0_rtx)
2140 return false;
2142 iv1->base = simplify_gen_unary (signed_p ? SIGN_EXTEND : ZERO_EXTEND,
2143 comp_mode, iv1->base, iv1->mode);
2144 iv1->extend_mode = comp_mode;
2147 /* Check that both ivs belong to a range of a single mode. If one of the
2148 operands is an invariant, we may need to shorten it into the common
2149 mode. */
2150 if (iv0->mode == iv0->extend_mode
2151 && iv0->step == const0_rtx
2152 && iv0->mode != iv1->mode)
2153 shorten_into_mode (iv0, iv1->mode, cond, signed_p, desc);
2155 if (iv1->mode == iv1->extend_mode
2156 && iv1->step == const0_rtx
2157 && iv0->mode != iv1->mode)
2158 shorten_into_mode (iv1, iv0->mode, swap_condition (cond), signed_p, desc);
2160 if (iv0->mode != iv1->mode)
2161 return false;
2163 desc->mode = iv0->mode;
2164 desc->signed_p = signed_p;
2166 return true;
2169 /* Tries to estimate the maximum number of iterations in LOOP, and store the
2170 result in DESC. This function is called from iv_number_of_iterations with
2171 a number of fields in DESC already filled in. OLD_NITER is the original
2172 expression for the number of iterations, before we tried to simplify it. */
2174 static unsigned HOST_WIDEST_INT
2175 determine_max_iter (struct loop *loop, struct niter_desc *desc, rtx old_niter)
2177 rtx niter = desc->niter_expr;
2178 rtx mmin, mmax, cmp;
2179 unsigned HOST_WIDEST_INT nmax, inc;
2181 if (GET_CODE (niter) == AND
2182 && GET_CODE (XEXP (niter, 0)) == CONST_INT)
2184 nmax = INTVAL (XEXP (niter, 0));
2185 if (!(nmax & (nmax + 1)))
2187 desc->niter_max = nmax;
2188 return nmax;
2192 get_mode_bounds (desc->mode, desc->signed_p, desc->mode, &mmin, &mmax);
2193 nmax = INTVAL (mmax) - INTVAL (mmin);
2195 if (GET_CODE (niter) == UDIV)
2197 if (GET_CODE (XEXP (niter, 1)) != CONST_INT)
2199 desc->niter_max = nmax;
2200 return nmax;
2202 inc = INTVAL (XEXP (niter, 1));
2203 niter = XEXP (niter, 0);
2205 else
2206 inc = 1;
2208 /* We could use a binary search here, but for now improving the upper
2209 bound by just one eliminates one important corner case. */
2210 cmp = simplify_gen_relational (desc->signed_p ? LT : LTU, VOIDmode,
2211 desc->mode, old_niter, mmax);
2212 simplify_using_initial_values (loop, UNKNOWN, &cmp);
2213 if (cmp == const_true_rtx)
2215 nmax--;
2217 if (dump_file)
2218 fprintf (dump_file, ";; improved upper bound by one.\n");
2220 desc->niter_max = nmax / inc;
2221 return nmax / inc;
2224 /* Computes number of iterations of the CONDITION in INSN in LOOP and stores
2225 the result into DESC. Very similar to determine_number_of_iterations
2226 (basically its rtl version), complicated by things like subregs. */
2228 static void
2229 iv_number_of_iterations (struct loop *loop, rtx insn, rtx condition,
2230 struct niter_desc *desc)
2232 rtx op0, op1, delta, step, bound, may_xform, tmp, tmp0, tmp1;
2233 struct rtx_iv iv0, iv1, tmp_iv;
2234 rtx assumption, may_not_xform;
2235 enum rtx_code cond;
2236 enum machine_mode mode, comp_mode;
2237 rtx mmin, mmax, mode_mmin, mode_mmax;
2238 unsigned HOST_WIDEST_INT s, size, d, inv;
2239 HOST_WIDEST_INT up, down, inc, step_val;
2240 int was_sharp = false;
2241 rtx old_niter;
2242 bool step_is_pow2;
2244 /* The meaning of these assumptions is this:
2245 if !assumptions
2246 then the rest of information does not have to be valid
2247 if noloop_assumptions then the loop does not roll
2248 if infinite then this exit is never used */
2250 desc->assumptions = NULL_RTX;
2251 desc->noloop_assumptions = NULL_RTX;
2252 desc->infinite = NULL_RTX;
2253 desc->simple_p = true;
2255 desc->const_iter = false;
2256 desc->niter_expr = NULL_RTX;
2257 desc->niter_max = 0;
2259 cond = GET_CODE (condition);
2260 gcc_assert (COMPARISON_P (condition));
2262 mode = GET_MODE (XEXP (condition, 0));
2263 if (mode == VOIDmode)
2264 mode = GET_MODE (XEXP (condition, 1));
2265 /* The constant comparisons should be folded. */
2266 gcc_assert (mode != VOIDmode);
2268 /* We only handle integers or pointers. */
2269 if (GET_MODE_CLASS (mode) != MODE_INT
2270 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
2271 goto fail;
2273 op0 = XEXP (condition, 0);
2274 if (!iv_analyze (insn, op0, &iv0))
2275 goto fail;
2276 if (iv0.extend_mode == VOIDmode)
2277 iv0.mode = iv0.extend_mode = mode;
2279 op1 = XEXP (condition, 1);
2280 if (!iv_analyze (insn, op1, &iv1))
2281 goto fail;
2282 if (iv1.extend_mode == VOIDmode)
2283 iv1.mode = iv1.extend_mode = mode;
2285 if (GET_MODE_BITSIZE (iv0.extend_mode) > HOST_BITS_PER_WIDE_INT
2286 || GET_MODE_BITSIZE (iv1.extend_mode) > HOST_BITS_PER_WIDE_INT)
2287 goto fail;
2289 /* Check condition and normalize it. */
2291 switch (cond)
2293 case GE:
2294 case GT:
2295 case GEU:
2296 case GTU:
2297 tmp_iv = iv0; iv0 = iv1; iv1 = tmp_iv;
2298 cond = swap_condition (cond);
2299 break;
2300 case NE:
2301 case LE:
2302 case LEU:
2303 case LT:
2304 case LTU:
2305 break;
2306 default:
2307 goto fail;
2310 /* Handle extends. This is relatively nontrivial, so we only try in some
2311 easy cases, when we can canonicalize the ivs (possibly by adding some
2312 assumptions) to shape subreg (base + i * step). This function also fills
2313 in desc->mode and desc->signed_p. */
2315 if (!canonicalize_iv_subregs (&iv0, &iv1, cond, desc))
2316 goto fail;
2318 comp_mode = iv0.extend_mode;
2319 mode = iv0.mode;
2320 size = GET_MODE_BITSIZE (mode);
2321 get_mode_bounds (mode, (cond == LE || cond == LT), comp_mode, &mmin, &mmax);
2322 mode_mmin = lowpart_subreg (mode, mmin, comp_mode);
2323 mode_mmax = lowpart_subreg (mode, mmax, comp_mode);
2325 if (GET_CODE (iv0.step) != CONST_INT || GET_CODE (iv1.step) != CONST_INT)
2326 goto fail;
2328 /* We can take care of the case of two induction variables chasing each other
2329 if the test is NE. I have never seen a loop using it, but still it is
2330 cool. */
2331 if (iv0.step != const0_rtx && iv1.step != const0_rtx)
2333 if (cond != NE)
2334 goto fail;
2336 iv0.step = simplify_gen_binary (MINUS, comp_mode, iv0.step, iv1.step);
2337 iv1.step = const0_rtx;
2340 /* This is either infinite loop or the one that ends immediately, depending
2341 on initial values. Unswitching should remove this kind of conditions. */
2342 if (iv0.step == const0_rtx && iv1.step == const0_rtx)
2343 goto fail;
2345 if (cond != NE)
2347 if (iv0.step == const0_rtx)
2348 step_val = -INTVAL (iv1.step);
2349 else
2350 step_val = INTVAL (iv0.step);
2352 /* Ignore loops of while (i-- < 10) type. */
2353 if (step_val < 0)
2354 goto fail;
2356 step_is_pow2 = !(step_val & (step_val - 1));
2358 else
2360 /* We do not care about whether the step is power of two in this
2361 case. */
2362 step_is_pow2 = false;
2363 step_val = 0;
2366 /* Some more condition normalization. We must record some assumptions
2367 due to overflows. */
2368 switch (cond)
2370 case LT:
2371 case LTU:
2372 /* We want to take care only of non-sharp relationals; this is easy,
2373 as in cases the overflow would make the transformation unsafe
2374 the loop does not roll. Seemingly it would make more sense to want
2375 to take care of sharp relationals instead, as NE is more similar to
2376 them, but the problem is that here the transformation would be more
2377 difficult due to possibly infinite loops. */
2378 if (iv0.step == const0_rtx)
2380 tmp = lowpart_subreg (mode, iv0.base, comp_mode);
2381 assumption = simplify_gen_relational (EQ, SImode, mode, tmp,
2382 mode_mmax);
2383 if (assumption == const_true_rtx)
2384 goto zero_iter_simplify;
2385 iv0.base = simplify_gen_binary (PLUS, comp_mode,
2386 iv0.base, const1_rtx);
2388 else
2390 tmp = lowpart_subreg (mode, iv1.base, comp_mode);
2391 assumption = simplify_gen_relational (EQ, SImode, mode, tmp,
2392 mode_mmin);
2393 if (assumption == const_true_rtx)
2394 goto zero_iter_simplify;
2395 iv1.base = simplify_gen_binary (PLUS, comp_mode,
2396 iv1.base, constm1_rtx);
2399 if (assumption != const0_rtx)
2400 desc->noloop_assumptions =
2401 alloc_EXPR_LIST (0, assumption, desc->noloop_assumptions);
2402 cond = (cond == LT) ? LE : LEU;
2404 /* It will be useful to be able to tell the difference once more in
2405 LE -> NE reduction. */
2406 was_sharp = true;
2407 break;
2408 default: ;
2411 /* Take care of trivially infinite loops. */
2412 if (cond != NE)
2414 if (iv0.step == const0_rtx)
2416 tmp = lowpart_subreg (mode, iv0.base, comp_mode);
2417 if (rtx_equal_p (tmp, mode_mmin))
2419 desc->infinite =
2420 alloc_EXPR_LIST (0, const_true_rtx, NULL_RTX);
2421 /* Fill in the remaining fields somehow. */
2422 goto zero_iter_simplify;
2425 else
2427 tmp = lowpart_subreg (mode, iv1.base, comp_mode);
2428 if (rtx_equal_p (tmp, mode_mmax))
2430 desc->infinite =
2431 alloc_EXPR_LIST (0, const_true_rtx, NULL_RTX);
2432 /* Fill in the remaining fields somehow. */
2433 goto zero_iter_simplify;
2438 /* If we can we want to take care of NE conditions instead of size
2439 comparisons, as they are much more friendly (most importantly
2440 this takes care of special handling of loops with step 1). We can
2441 do it if we first check that upper bound is greater or equal to
2442 lower bound, their difference is constant c modulo step and that
2443 there is not an overflow. */
2444 if (cond != NE)
2446 if (iv0.step == const0_rtx)
2447 step = simplify_gen_unary (NEG, comp_mode, iv1.step, comp_mode);
2448 else
2449 step = iv0.step;
2450 delta = simplify_gen_binary (MINUS, comp_mode, iv1.base, iv0.base);
2451 delta = lowpart_subreg (mode, delta, comp_mode);
2452 delta = simplify_gen_binary (UMOD, mode, delta, step);
2453 may_xform = const0_rtx;
2454 may_not_xform = const_true_rtx;
2456 if (GET_CODE (delta) == CONST_INT)
2458 if (was_sharp && INTVAL (delta) == INTVAL (step) - 1)
2460 /* A special case. We have transformed condition of type
2461 for (i = 0; i < 4; i += 4)
2462 into
2463 for (i = 0; i <= 3; i += 4)
2464 obviously if the test for overflow during that transformation
2465 passed, we cannot overflow here. Most importantly any
2466 loop with sharp end condition and step 1 falls into this
2467 category, so handling this case specially is definitely
2468 worth the troubles. */
2469 may_xform = const_true_rtx;
2471 else if (iv0.step == const0_rtx)
2473 bound = simplify_gen_binary (PLUS, comp_mode, mmin, step);
2474 bound = simplify_gen_binary (MINUS, comp_mode, bound, delta);
2475 bound = lowpart_subreg (mode, bound, comp_mode);
2476 tmp = lowpart_subreg (mode, iv0.base, comp_mode);
2477 may_xform = simplify_gen_relational (cond, SImode, mode,
2478 bound, tmp);
2479 may_not_xform = simplify_gen_relational (reverse_condition (cond),
2480 SImode, mode,
2481 bound, tmp);
2483 else
2485 bound = simplify_gen_binary (MINUS, comp_mode, mmax, step);
2486 bound = simplify_gen_binary (PLUS, comp_mode, bound, delta);
2487 bound = lowpart_subreg (mode, bound, comp_mode);
2488 tmp = lowpart_subreg (mode, iv1.base, comp_mode);
2489 may_xform = simplify_gen_relational (cond, SImode, mode,
2490 tmp, bound);
2491 may_not_xform = simplify_gen_relational (reverse_condition (cond),
2492 SImode, mode,
2493 tmp, bound);
2497 if (may_xform != const0_rtx)
2499 /* We perform the transformation always provided that it is not
2500 completely senseless. This is OK, as we would need this assumption
2501 to determine the number of iterations anyway. */
2502 if (may_xform != const_true_rtx)
2504 /* If the step is a power of two and the final value we have
2505 computed overflows, the cycle is infinite. Otherwise it
2506 is nontrivial to compute the number of iterations. */
2507 if (step_is_pow2)
2508 desc->infinite = alloc_EXPR_LIST (0, may_not_xform,
2509 desc->infinite);
2510 else
2511 desc->assumptions = alloc_EXPR_LIST (0, may_xform,
2512 desc->assumptions);
2515 /* We are going to lose some information about upper bound on
2516 number of iterations in this step, so record the information
2517 here. */
2518 inc = INTVAL (iv0.step) - INTVAL (iv1.step);
2519 if (GET_CODE (iv1.base) == CONST_INT)
2520 up = INTVAL (iv1.base);
2521 else
2522 up = INTVAL (mode_mmax) - inc;
2523 down = INTVAL (GET_CODE (iv0.base) == CONST_INT
2524 ? iv0.base
2525 : mode_mmin);
2526 desc->niter_max = (up - down) / inc + 1;
2528 if (iv0.step == const0_rtx)
2530 iv0.base = simplify_gen_binary (PLUS, comp_mode, iv0.base, delta);
2531 iv0.base = simplify_gen_binary (MINUS, comp_mode, iv0.base, step);
2533 else
2535 iv1.base = simplify_gen_binary (MINUS, comp_mode, iv1.base, delta);
2536 iv1.base = simplify_gen_binary (PLUS, comp_mode, iv1.base, step);
2539 tmp0 = lowpart_subreg (mode, iv0.base, comp_mode);
2540 tmp1 = lowpart_subreg (mode, iv1.base, comp_mode);
2541 assumption = simplify_gen_relational (reverse_condition (cond),
2542 SImode, mode, tmp0, tmp1);
2543 if (assumption == const_true_rtx)
2544 goto zero_iter_simplify;
2545 else if (assumption != const0_rtx)
2546 desc->noloop_assumptions =
2547 alloc_EXPR_LIST (0, assumption, desc->noloop_assumptions);
2548 cond = NE;
2552 /* Count the number of iterations. */
2553 if (cond == NE)
2555 /* Everything we do here is just arithmetics modulo size of mode. This
2556 makes us able to do more involved computations of number of iterations
2557 than in other cases. First transform the condition into shape
2558 s * i <> c, with s positive. */
2559 iv1.base = simplify_gen_binary (MINUS, comp_mode, iv1.base, iv0.base);
2560 iv0.base = const0_rtx;
2561 iv0.step = simplify_gen_binary (MINUS, comp_mode, iv0.step, iv1.step);
2562 iv1.step = const0_rtx;
2563 if (INTVAL (iv0.step) < 0)
2565 iv0.step = simplify_gen_unary (NEG, comp_mode, iv0.step, mode);
2566 iv1.base = simplify_gen_unary (NEG, comp_mode, iv1.base, mode);
2568 iv0.step = lowpart_subreg (mode, iv0.step, comp_mode);
2570 /* Let nsd (s, size of mode) = d. If d does not divide c, the loop
2571 is infinite. Otherwise, the number of iterations is
2572 (inverse(s/d) * (c/d)) mod (size of mode/d). */
2573 s = INTVAL (iv0.step); d = 1;
2574 while (s % 2 != 1)
2576 s /= 2;
2577 d *= 2;
2578 size--;
2580 bound = GEN_INT (((unsigned HOST_WIDEST_INT) 1 << (size - 1 ) << 1) - 1);
2582 tmp1 = lowpart_subreg (mode, iv1.base, comp_mode);
2583 tmp = simplify_gen_binary (UMOD, mode, tmp1, GEN_INT (d));
2584 assumption = simplify_gen_relational (NE, SImode, mode, tmp, const0_rtx);
2585 desc->infinite = alloc_EXPR_LIST (0, assumption, desc->infinite);
2587 tmp = simplify_gen_binary (UDIV, mode, tmp1, GEN_INT (d));
2588 inv = inverse (s, size);
2589 tmp = simplify_gen_binary (MULT, mode, tmp, gen_int_mode (inv, mode));
2590 desc->niter_expr = simplify_gen_binary (AND, mode, tmp, bound);
2592 else
2594 if (iv1.step == const0_rtx)
2595 /* Condition in shape a + s * i <= b
2596 We must know that b + s does not overflow and a <= b + s and then we
2597 can compute number of iterations as (b + s - a) / s. (It might
2598 seem that we in fact could be more clever about testing the b + s
2599 overflow condition using some information about b - a mod s,
2600 but it was already taken into account during LE -> NE transform). */
2602 step = iv0.step;
2603 tmp0 = lowpart_subreg (mode, iv0.base, comp_mode);
2604 tmp1 = lowpart_subreg (mode, iv1.base, comp_mode);
2606 bound = simplify_gen_binary (MINUS, mode, mode_mmax,
2607 lowpart_subreg (mode, step,
2608 comp_mode));
2609 if (step_is_pow2)
2611 rtx t0, t1;
2613 /* If s is power of 2, we know that the loop is infinite if
2614 a % s <= b % s and b + s overflows. */
2615 assumption = simplify_gen_relational (reverse_condition (cond),
2616 SImode, mode,
2617 tmp1, bound);
2619 t0 = simplify_gen_binary (UMOD, mode, copy_rtx (tmp0), step);
2620 t1 = simplify_gen_binary (UMOD, mode, copy_rtx (tmp1), step);
2621 tmp = simplify_gen_relational (cond, SImode, mode, t0, t1);
2622 assumption = simplify_gen_binary (AND, SImode, assumption, tmp);
2623 desc->infinite =
2624 alloc_EXPR_LIST (0, assumption, desc->infinite);
2626 else
2628 assumption = simplify_gen_relational (cond, SImode, mode,
2629 tmp1, bound);
2630 desc->assumptions =
2631 alloc_EXPR_LIST (0, assumption, desc->assumptions);
2634 tmp = simplify_gen_binary (PLUS, comp_mode, iv1.base, iv0.step);
2635 tmp = lowpart_subreg (mode, tmp, comp_mode);
2636 assumption = simplify_gen_relational (reverse_condition (cond),
2637 SImode, mode, tmp0, tmp);
2639 delta = simplify_gen_binary (PLUS, mode, tmp1, step);
2640 delta = simplify_gen_binary (MINUS, mode, delta, tmp0);
2642 else
2644 /* Condition in shape a <= b - s * i
2645 We must know that a - s does not overflow and a - s <= b and then
2646 we can again compute number of iterations as (b - (a - s)) / s. */
2647 step = simplify_gen_unary (NEG, mode, iv1.step, mode);
2648 tmp0 = lowpart_subreg (mode, iv0.base, comp_mode);
2649 tmp1 = lowpart_subreg (mode, iv1.base, comp_mode);
2651 bound = simplify_gen_binary (PLUS, mode, mode_mmin,
2652 lowpart_subreg (mode, step, comp_mode));
2653 if (step_is_pow2)
2655 rtx t0, t1;
2657 /* If s is power of 2, we know that the loop is infinite if
2658 a % s <= b % s and a - s overflows. */
2659 assumption = simplify_gen_relational (reverse_condition (cond),
2660 SImode, mode,
2661 bound, tmp0);
2663 t0 = simplify_gen_binary (UMOD, mode, copy_rtx (tmp0), step);
2664 t1 = simplify_gen_binary (UMOD, mode, copy_rtx (tmp1), step);
2665 tmp = simplify_gen_relational (cond, SImode, mode, t0, t1);
2666 assumption = simplify_gen_binary (AND, SImode, assumption, tmp);
2667 desc->infinite =
2668 alloc_EXPR_LIST (0, assumption, desc->infinite);
2670 else
2672 assumption = simplify_gen_relational (cond, SImode, mode,
2673 bound, tmp0);
2674 desc->assumptions =
2675 alloc_EXPR_LIST (0, assumption, desc->assumptions);
2678 tmp = simplify_gen_binary (PLUS, comp_mode, iv0.base, iv1.step);
2679 tmp = lowpart_subreg (mode, tmp, comp_mode);
2680 assumption = simplify_gen_relational (reverse_condition (cond),
2681 SImode, mode,
2682 tmp, tmp1);
2683 delta = simplify_gen_binary (MINUS, mode, tmp0, step);
2684 delta = simplify_gen_binary (MINUS, mode, tmp1, delta);
2686 if (assumption == const_true_rtx)
2687 goto zero_iter_simplify;
2688 else if (assumption != const0_rtx)
2689 desc->noloop_assumptions =
2690 alloc_EXPR_LIST (0, assumption, desc->noloop_assumptions);
2691 delta = simplify_gen_binary (UDIV, mode, delta, step);
2692 desc->niter_expr = delta;
2695 old_niter = desc->niter_expr;
2697 simplify_using_initial_values (loop, AND, &desc->assumptions);
2698 if (desc->assumptions
2699 && XEXP (desc->assumptions, 0) == const0_rtx)
2700 goto fail;
2701 simplify_using_initial_values (loop, IOR, &desc->noloop_assumptions);
2702 simplify_using_initial_values (loop, IOR, &desc->infinite);
2703 simplify_using_initial_values (loop, UNKNOWN, &desc->niter_expr);
2705 /* Rerun the simplification. Consider code (created by copying loop headers)
2707 i = 0;
2709 if (0 < n)
2713 i++;
2714 } while (i < n);
2717 The first pass determines that i = 0, the second pass uses it to eliminate
2718 noloop assumption. */
2720 simplify_using_initial_values (loop, AND, &desc->assumptions);
2721 if (desc->assumptions
2722 && XEXP (desc->assumptions, 0) == const0_rtx)
2723 goto fail;
2724 simplify_using_initial_values (loop, IOR, &desc->noloop_assumptions);
2725 simplify_using_initial_values (loop, IOR, &desc->infinite);
2726 simplify_using_initial_values (loop, UNKNOWN, &desc->niter_expr);
2728 if (desc->noloop_assumptions
2729 && XEXP (desc->noloop_assumptions, 0) == const_true_rtx)
2730 goto zero_iter;
2732 if (GET_CODE (desc->niter_expr) == CONST_INT)
2734 unsigned HOST_WIDEST_INT val = INTVAL (desc->niter_expr);
2736 desc->const_iter = true;
2737 desc->niter_max = desc->niter = val & GET_MODE_MASK (desc->mode);
2739 else
2741 if (!desc->niter_max)
2742 desc->niter_max = determine_max_iter (loop, desc, old_niter);
2744 /* simplify_using_initial_values does a copy propagation on the registers
2745 in the expression for the number of iterations. This prolongs life
2746 ranges of registers and increases register pressure, and usually
2747 brings no gain (and if it happens to do, the cse pass will take care
2748 of it anyway). So prevent this behavior, unless it enabled us to
2749 derive that the number of iterations is a constant. */
2750 desc->niter_expr = old_niter;
2753 return;
2755 zero_iter_simplify:
2756 /* Simplify the assumptions. */
2757 simplify_using_initial_values (loop, AND, &desc->assumptions);
2758 if (desc->assumptions
2759 && XEXP (desc->assumptions, 0) == const0_rtx)
2760 goto fail;
2761 simplify_using_initial_values (loop, IOR, &desc->infinite);
2763 /* Fallthru. */
2764 zero_iter:
2765 desc->const_iter = true;
2766 desc->niter = 0;
2767 desc->niter_max = 0;
2768 desc->noloop_assumptions = NULL_RTX;
2769 desc->niter_expr = const0_rtx;
2770 return;
2772 fail:
2773 desc->simple_p = false;
2774 return;
2777 /* Checks whether E is a simple exit from LOOP and stores its description
2778 into DESC. */
2780 static void
2781 check_simple_exit (struct loop *loop, edge e, struct niter_desc *desc)
2783 basic_block exit_bb;
2784 rtx condition, at;
2785 edge ein;
2787 exit_bb = e->src;
2788 desc->simple_p = false;
2790 /* It must belong directly to the loop. */
2791 if (exit_bb->loop_father != loop)
2792 return;
2794 /* It must be tested (at least) once during any iteration. */
2795 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit_bb))
2796 return;
2798 /* It must end in a simple conditional jump. */
2799 if (!any_condjump_p (BB_END (exit_bb)))
2800 return;
2802 ein = EDGE_SUCC (exit_bb, 0);
2803 if (ein == e)
2804 ein = EDGE_SUCC (exit_bb, 1);
2806 desc->out_edge = e;
2807 desc->in_edge = ein;
2809 /* Test whether the condition is suitable. */
2810 if (!(condition = get_condition (BB_END (ein->src), &at, false, false)))
2811 return;
2813 if (ein->flags & EDGE_FALLTHRU)
2815 condition = reversed_condition (condition);
2816 if (!condition)
2817 return;
2820 /* Check that we are able to determine number of iterations and fill
2821 in information about it. */
2822 iv_number_of_iterations (loop, at, condition, desc);
2825 /* Finds a simple exit of LOOP and stores its description into DESC. */
2827 void
2828 find_simple_exit (struct loop *loop, struct niter_desc *desc)
2830 unsigned i;
2831 basic_block *body;
2832 edge e;
2833 struct niter_desc act;
2834 bool any = false;
2835 edge_iterator ei;
2837 desc->simple_p = false;
2838 body = get_loop_body (loop);
2840 for (i = 0; i < loop->num_nodes; i++)
2842 FOR_EACH_EDGE (e, ei, body[i]->succs)
2844 if (flow_bb_inside_loop_p (loop, e->dest))
2845 continue;
2847 check_simple_exit (loop, e, &act);
2848 if (!act.simple_p)
2849 continue;
2851 if (!any)
2852 any = true;
2853 else
2855 /* Prefer constant iterations; the less the better. */
2856 if (!act.const_iter
2857 || (desc->const_iter && act.niter >= desc->niter))
2858 continue;
2860 /* Also if the actual exit may be infinite, while the old one
2861 not, prefer the old one. */
2862 if (act.infinite && !desc->infinite)
2863 continue;
2866 *desc = act;
2870 if (dump_file)
2872 if (desc->simple_p)
2874 fprintf (dump_file, "Loop %d is simple:\n", loop->num);
2875 fprintf (dump_file, " simple exit %d -> %d\n",
2876 desc->out_edge->src->index,
2877 desc->out_edge->dest->index);
2878 if (desc->assumptions)
2880 fprintf (dump_file, " assumptions: ");
2881 print_rtl (dump_file, desc->assumptions);
2882 fprintf (dump_file, "\n");
2884 if (desc->noloop_assumptions)
2886 fprintf (dump_file, " does not roll if: ");
2887 print_rtl (dump_file, desc->noloop_assumptions);
2888 fprintf (dump_file, "\n");
2890 if (desc->infinite)
2892 fprintf (dump_file, " infinite if: ");
2893 print_rtl (dump_file, desc->infinite);
2894 fprintf (dump_file, "\n");
2897 fprintf (dump_file, " number of iterations: ");
2898 print_rtl (dump_file, desc->niter_expr);
2899 fprintf (dump_file, "\n");
2901 fprintf (dump_file, " upper bound: ");
2902 fprintf (dump_file, HOST_WIDEST_INT_PRINT_DEC, desc->niter_max);
2903 fprintf (dump_file, "\n");
2905 else
2906 fprintf (dump_file, "Loop %d is not simple.\n", loop->num);
2909 free (body);
2912 /* Creates a simple loop description of LOOP if it was not computed
2913 already. */
2915 struct niter_desc *
2916 get_simple_loop_desc (struct loop *loop)
2918 struct niter_desc *desc = simple_loop_desc (loop);
2920 if (desc)
2921 return desc;
2923 /* At least desc->infinite is not always initialized by
2924 find_simple_loop_exit. */
2925 desc = XCNEW (struct niter_desc);
2926 iv_analysis_loop_init (loop);
2927 find_simple_exit (loop, desc);
2928 loop->aux = desc;
2930 if (desc->simple_p && (desc->assumptions || desc->infinite))
2932 const char *wording;
2934 /* Assume that no overflow happens and that the loop is finite.
2935 We already warned at the tree level if we ran optimizations there. */
2936 if (!flag_tree_loop_optimize && warn_unsafe_loop_optimizations)
2938 if (desc->infinite)
2940 wording =
2941 flag_unsafe_loop_optimizations
2942 ? N_("assuming that the loop is not infinite")
2943 : N_("cannot optimize possibly infinite loops");
2944 warning (OPT_Wunsafe_loop_optimizations, "%s",
2945 gettext (wording));
2947 if (desc->assumptions)
2949 wording =
2950 flag_unsafe_loop_optimizations
2951 ? N_("assuming that the loop counter does not overflow")
2952 : N_("cannot optimize loop, the loop counter may overflow");
2953 warning (OPT_Wunsafe_loop_optimizations, "%s",
2954 gettext (wording));
2958 if (flag_unsafe_loop_optimizations)
2960 desc->assumptions = NULL_RTX;
2961 desc->infinite = NULL_RTX;
2965 return desc;
2968 /* Releases simple loop description for LOOP. */
2970 void
2971 free_simple_loop_desc (struct loop *loop)
2973 struct niter_desc *desc = simple_loop_desc (loop);
2975 if (!desc)
2976 return;
2978 free (desc);
2979 loop->aux = NULL;