gcc/loop-iv.c
1 /* Rtl-level induction variable analysis.
2 Copyright (C) 2004-2015 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published by the
8 Free Software Foundation; either version 3, or (at your option) any
9 later version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT
12 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* This is a simple analysis of induction variables of the loop. The major use
21 is for determining the number of iterations of a loop for loop unrolling,
22 doloop optimization and branch prediction. The iv information is computed
23 on demand.
25 Induction variables are analyzed by walking the use-def chains. When
26 a basic induction variable (biv) is found, it is cached in the bivs
27 hash table. When a register is proved to be a biv, its description
28 is stored to DF_REF_DATA of the def reference.
30 The analysis always works with one loop -- you must call
31 iv_analysis_loop_init (loop) for it. All the other functions then work with
32 this loop. When you need to work with another loop, just call
33 iv_analysis_loop_init for it. When you no longer need iv analysis, call
34 iv_analysis_done () to clean up the memory.
36 The available functions are:
38 iv_analyze (insn, reg, iv): Stores the description of the induction variable
39 corresponding to the use of register REG in INSN to IV. Returns true if
40 REG is an induction variable in INSN, false otherwise.
41 If the use of REG is not found in INSN, the following insns are scanned (so that
42 we may call this function on the insn returned by get_condition).
43 iv_analyze_result (insn, def, iv): Stores to IV the description of the iv
44 corresponding to DEF, which is a register defined in INSN.
45 iv_analyze_expr (insn, rhs, mode, iv): Stores to IV the description of iv
46 corresponding to expression EXPR evaluated at INSN. All registers used by
47 EXPR must also be used in INSN. */
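/* A minimal usage sketch, assuming LOOP comes from the loop optimizer and
   REG is some register used in the condition insn INSN; a typical caller
   drives the functions above roughly as follows:

       struct rtx_iv iv;

       iv_analysis_loop_init (loop);
       if (iv_analyze (insn, reg, &iv))
         {
           ... inspect iv.base, iv.step, iv.mode ...
         }
       iv_analysis_done ();

   iv_analysis_loop_init may be called again to switch to another loop;
   iv_analysis_done releases all cached results.  */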
50 #include "config.h"
51 #include "system.h"
52 #include "coretypes.h"
53 #include "backend.h"
54 #include "tree.h"
55 #include "rtl.h"
56 #include "df.h"
57 #include "cfgloop.h"
58 #include "flags.h"
59 #include "alias.h"
60 #include "insn-config.h"
61 #include "expmed.h"
62 #include "dojump.h"
63 #include "explow.h"
64 #include "calls.h"
65 #include "emit-rtl.h"
66 #include "varasm.h"
67 #include "stmt.h"
68 #include "expr.h"
69 #include "intl.h"
70 #include "diagnostic-core.h"
71 #include "dumpfile.h"
72 #include "rtl-iter.h"
74 /* Possible return values of iv_get_reaching_def. */
76 enum iv_grd_result
78 /* More than one reaching def, or reaching def that does not
79 dominate the use. */
80 GRD_INVALID,
82 /* The use is a trivial invariant of the loop, i.e. it is not changed
83 inside the loop. */
84 GRD_INVARIANT,
86 /* The use is reached by initial value and a value from the
87 previous iteration. */
88 GRD_MAYBE_BIV,
90 /* The use has single dominating def. */
91 GRD_SINGLE_DOM
94 /* Information about a biv. */
96 struct biv_entry
98 unsigned regno; /* The register of the biv. */
99 struct rtx_iv iv; /* Value of the biv. */
102 static bool clean_slate = true;
104 static unsigned int iv_ref_table_size = 0;
106 /* Table of rtx_ivs indexed by the df_ref uid field. */
107 static struct rtx_iv ** iv_ref_table;
109 /* Induction variable stored at the reference. */
110 #define DF_REF_IV(REF) iv_ref_table[DF_REF_ID (REF)]
111 #define DF_REF_IV_SET(REF, IV) iv_ref_table[DF_REF_ID (REF)] = (IV)
113 /* The current loop. */
115 static struct loop *current_loop;
117 /* Hashtable helper. */
119 struct biv_entry_hasher : free_ptr_hash <biv_entry>
121 typedef rtx_def *compare_type;
122 static inline hashval_t hash (const biv_entry *);
123 static inline bool equal (const biv_entry *, const rtx_def *);
126 /* Returns hash value for biv B. */
128 inline hashval_t
129 biv_entry_hasher::hash (const biv_entry *b)
131 return b->regno;
134 /* Compares biv B and register R. */
136 inline bool
137 biv_entry_hasher::equal (const biv_entry *b, const rtx_def *r)
139 return b->regno == REGNO (r);
142 /* Bivs of the current loop. */
144 static hash_table<biv_entry_hasher> *bivs;
146 static bool iv_analyze_op (rtx_insn *, rtx, struct rtx_iv *);
148 /* Return the RTX code corresponding to the IV extend code EXTEND. */
149 static inline enum rtx_code
150 iv_extend_to_rtx_code (enum iv_extend_code extend)
152 switch (extend)
154 case IV_SIGN_EXTEND:
155 return SIGN_EXTEND;
156 case IV_ZERO_EXTEND:
157 return ZERO_EXTEND;
158 case IV_UNKNOWN_EXTEND:
159 return UNKNOWN;
161 gcc_unreachable ();
164 /* Dumps information about IV to FILE. */
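/* A rough note on the representation, for reading the dump below: the value
   described by a struct rtx_iv is approximately
   delta + mult * extend_[extend_mode] (subreg_[mode] (base + i * step))
   at iteration i (compare get_iv_value below).  For instance, a plain SImode
   counter with base 0 and step 4 is dumped as

       (const_int 0) + (const_int 4) * iteration (in SImode)

   while a loop invariant is printed as "invariant " followed by its base
   and mode.  */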
166 extern void dump_iv_info (FILE *, struct rtx_iv *);
167 void
168 dump_iv_info (FILE *file, struct rtx_iv *iv)
170 if (!iv->base)
172 fprintf (file, "not simple");
173 return;
176 if (iv->step == const0_rtx
177 && !iv->first_special)
178 fprintf (file, "invariant ");
180 print_rtl (file, iv->base);
181 if (iv->step != const0_rtx)
183 fprintf (file, " + ");
184 print_rtl (file, iv->step);
185 fprintf (file, " * iteration");
187 fprintf (file, " (in %s)", GET_MODE_NAME (iv->mode));
189 if (iv->mode != iv->extend_mode)
190 fprintf (file, " %s to %s",
191 rtx_name[iv_extend_to_rtx_code (iv->extend)],
192 GET_MODE_NAME (iv->extend_mode));
194 if (iv->mult != const1_rtx)
196 fprintf (file, " * ");
197 print_rtl (file, iv->mult);
199 if (iv->delta != const0_rtx)
201 fprintf (file, " + ");
202 print_rtl (file, iv->delta);
204 if (iv->first_special)
205 fprintf (file, " (first special)");
208 /* Generates a subreg to get the least significant part of EXPR (in mode
209 INNER_MODE) to OUTER_MODE. */
212 lowpart_subreg (machine_mode outer_mode, rtx expr,
213 machine_mode inner_mode)
215 return simplify_gen_subreg (outer_mode, expr, inner_mode,
216 subreg_lowpart_offset (outer_mode, inner_mode));
219 static void
220 check_iv_ref_table_size (void)
222 if (iv_ref_table_size < DF_DEFS_TABLE_SIZE ())
224 unsigned int new_size = DF_DEFS_TABLE_SIZE () + (DF_DEFS_TABLE_SIZE () / 4);
225 iv_ref_table = XRESIZEVEC (struct rtx_iv *, iv_ref_table, new_size);
226 memset (&iv_ref_table[iv_ref_table_size], 0,
227 (new_size - iv_ref_table_size) * sizeof (struct rtx_iv *));
228 iv_ref_table_size = new_size;
233 /* Checks whether REG is a well-behaved register. */
235 static bool
236 simple_reg_p (rtx reg)
238 unsigned r;
240 if (GET_CODE (reg) == SUBREG)
242 if (!subreg_lowpart_p (reg))
243 return false;
244 reg = SUBREG_REG (reg);
247 if (!REG_P (reg))
248 return false;
250 r = REGNO (reg);
251 if (HARD_REGISTER_NUM_P (r))
252 return false;
254 if (GET_MODE_CLASS (GET_MODE (reg)) != MODE_INT)
255 return false;
257 return true;
260 /* Clears the information about ivs stored in df. */
262 static void
263 clear_iv_info (void)
265 unsigned i, n_defs = DF_DEFS_TABLE_SIZE ();
266 struct rtx_iv *iv;
268 check_iv_ref_table_size ();
269 for (i = 0; i < n_defs; i++)
271 iv = iv_ref_table[i];
272 if (iv)
274 free (iv);
275 iv_ref_table[i] = NULL;
279 bivs->empty ();
283 /* Prepare the data for an induction variable analysis of a LOOP. */
285 void
286 iv_analysis_loop_init (struct loop *loop)
288 current_loop = loop;
290 /* Clear the information from the analysis of the previous loop. */
291 if (clean_slate)
293 df_set_flags (DF_EQ_NOTES + DF_DEFER_INSN_RESCAN);
294 bivs = new hash_table<biv_entry_hasher> (10);
295 clean_slate = false;
297 else
298 clear_iv_info ();
300 /* Get rid of the ud chains before processing the rescans. Then add
301 the problem back. */
302 df_remove_problem (df_chain);
303 df_process_deferred_rescans ();
304 df_set_flags (DF_RD_PRUNE_DEAD_DEFS);
305 df_chain_add_problem (DF_UD_CHAIN);
306 df_note_add_problem ();
307 df_analyze_loop (loop);
308 if (dump_file)
309 df_dump_region (dump_file);
311 check_iv_ref_table_size ();
314 /* Finds the definition of REG that dominates loop latch and stores
315 it to DEF. Returns false if there is not a single definition
316 dominating the latch. If REG has no definition in the loop, DEF
317 is set to NULL and true is returned. */
319 static bool
320 latch_dominating_def (rtx reg, df_ref *def)
322 df_ref single_rd = NULL, adef;
323 unsigned regno = REGNO (reg);
324 struct df_rd_bb_info *bb_info = DF_RD_BB_INFO (current_loop->latch);
326 for (adef = DF_REG_DEF_CHAIN (regno); adef; adef = DF_REF_NEXT_REG (adef))
328 if (!bitmap_bit_p (df->blocks_to_analyze, DF_REF_BBNO (adef))
329 || !bitmap_bit_p (&bb_info->out, DF_REF_ID (adef)))
330 continue;
332 /* More than one reaching definition. */
333 if (single_rd)
334 return false;
336 if (!just_once_each_iteration_p (current_loop, DF_REF_BB (adef)))
337 return false;
339 single_rd = adef;
342 *def = single_rd;
343 return true;
346 /* Gets definition of REG reaching its use in INSN and stores it to DEF. */
348 static enum iv_grd_result
349 iv_get_reaching_def (rtx_insn *insn, rtx reg, df_ref *def)
351 df_ref use, adef;
352 basic_block def_bb, use_bb;
353 rtx_insn *def_insn;
354 bool dom_p;
356 *def = NULL;
357 if (!simple_reg_p (reg))
358 return GRD_INVALID;
359 if (GET_CODE (reg) == SUBREG)
360 reg = SUBREG_REG (reg);
361 gcc_assert (REG_P (reg));
363 use = df_find_use (insn, reg);
364 gcc_assert (use != NULL);
366 if (!DF_REF_CHAIN (use))
367 return GRD_INVARIANT;
369 /* More than one reaching def. */
370 if (DF_REF_CHAIN (use)->next)
371 return GRD_INVALID;
373 adef = DF_REF_CHAIN (use)->ref;
375 /* We do not handle setting only part of the register. */
376 if (DF_REF_FLAGS (adef) & DF_REF_READ_WRITE)
377 return GRD_INVALID;
379 def_insn = DF_REF_INSN (adef);
380 def_bb = DF_REF_BB (adef);
381 use_bb = BLOCK_FOR_INSN (insn);
383 if (use_bb == def_bb)
384 dom_p = (DF_INSN_LUID (def_insn) < DF_INSN_LUID (insn));
385 else
386 dom_p = dominated_by_p (CDI_DOMINATORS, use_bb, def_bb);
388 if (dom_p)
390 *def = adef;
391 return GRD_SINGLE_DOM;
394 /* The definition does not dominate the use. This is still OK if
395 this may be a use of a biv, i.e. if the def_bb dominates loop
396 latch. */
397 if (just_once_each_iteration_p (current_loop, def_bb))
398 return GRD_MAYBE_BIV;
400 return GRD_INVALID;
403 /* Sets IV to invariant CST in MODE. Always returns true (just for
404 consistency with other iv manipulation functions that may fail). */
406 static bool
407 iv_constant (struct rtx_iv *iv, rtx cst, machine_mode mode)
409 if (mode == VOIDmode)
410 mode = GET_MODE (cst);
412 iv->mode = mode;
413 iv->base = cst;
414 iv->step = const0_rtx;
415 iv->first_special = false;
416 iv->extend = IV_UNKNOWN_EXTEND;
417 iv->extend_mode = iv->mode;
418 iv->delta = const0_rtx;
419 iv->mult = const1_rtx;
421 return true;
424 /* Evaluates application of subreg to MODE on IV. */
426 static bool
427 iv_subreg (struct rtx_iv *iv, machine_mode mode)
429 /* If iv is invariant, just calculate the new value. */
430 if (iv->step == const0_rtx
431 && !iv->first_special)
433 rtx val = get_iv_value (iv, const0_rtx);
434 val = lowpart_subreg (mode, val,
435 iv->extend == IV_UNKNOWN_EXTEND
436 ? iv->mode : iv->extend_mode);
438 iv->base = val;
439 iv->extend = IV_UNKNOWN_EXTEND;
440 iv->mode = iv->extend_mode = mode;
441 iv->delta = const0_rtx;
442 iv->mult = const1_rtx;
443 return true;
446 if (iv->extend_mode == mode)
447 return true;
449 if (GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (iv->mode))
450 return false;
452 iv->extend = IV_UNKNOWN_EXTEND;
453 iv->mode = mode;
455 iv->base = simplify_gen_binary (PLUS, iv->extend_mode, iv->delta,
456 simplify_gen_binary (MULT, iv->extend_mode,
457 iv->base, iv->mult));
458 iv->step = simplify_gen_binary (MULT, iv->extend_mode, iv->step, iv->mult);
459 iv->mult = const1_rtx;
460 iv->delta = const0_rtx;
461 iv->first_special = false;
463 return true;
466 /* Evaluates application of EXTEND to MODE on IV. */
468 static bool
469 iv_extend (struct rtx_iv *iv, enum iv_extend_code extend, machine_mode mode)
471 /* If iv is invariant, just calculate the new value. */
472 if (iv->step == const0_rtx
473 && !iv->first_special)
475 rtx val = get_iv_value (iv, const0_rtx);
476 if (iv->extend_mode != iv->mode
477 && iv->extend != IV_UNKNOWN_EXTEND
478 && iv->extend != extend)
479 val = lowpart_subreg (iv->mode, val, iv->extend_mode);
480 val = simplify_gen_unary (iv_extend_to_rtx_code (extend), mode,
481 val,
482 iv->extend == extend
483 ? iv->extend_mode : iv->mode);
484 iv->base = val;
485 iv->extend = IV_UNKNOWN_EXTEND;
486 iv->mode = iv->extend_mode = mode;
487 iv->delta = const0_rtx;
488 iv->mult = const1_rtx;
489 return true;
492 if (mode != iv->extend_mode)
493 return false;
495 if (iv->extend != IV_UNKNOWN_EXTEND
496 && iv->extend != extend)
497 return false;
499 iv->extend = extend;
501 return true;
504 /* Evaluates negation of IV. */
506 static bool
507 iv_neg (struct rtx_iv *iv)
509 if (iv->extend == IV_UNKNOWN_EXTEND)
511 iv->base = simplify_gen_unary (NEG, iv->extend_mode,
512 iv->base, iv->extend_mode);
513 iv->step = simplify_gen_unary (NEG, iv->extend_mode,
514 iv->step, iv->extend_mode);
516 else
518 iv->delta = simplify_gen_unary (NEG, iv->extend_mode,
519 iv->delta, iv->extend_mode);
520 iv->mult = simplify_gen_unary (NEG, iv->extend_mode,
521 iv->mult, iv->extend_mode);
524 return true;
527 /* Evaluates addition or subtraction (according to OP) of IV1 to IV0. */
529 static bool
530 iv_add (struct rtx_iv *iv0, struct rtx_iv *iv1, enum rtx_code op)
532 machine_mode mode;
533 rtx arg;
535 /* Extend the constant to extend_mode of the other operand if necessary. */
536 if (iv0->extend == IV_UNKNOWN_EXTEND
537 && iv0->mode == iv0->extend_mode
538 && iv0->step == const0_rtx
539 && GET_MODE_SIZE (iv0->extend_mode) < GET_MODE_SIZE (iv1->extend_mode))
541 iv0->extend_mode = iv1->extend_mode;
542 iv0->base = simplify_gen_unary (ZERO_EXTEND, iv0->extend_mode,
543 iv0->base, iv0->mode);
545 if (iv1->extend == IV_UNKNOWN_EXTEND
546 && iv1->mode == iv1->extend_mode
547 && iv1->step == const0_rtx
548 && GET_MODE_SIZE (iv1->extend_mode) < GET_MODE_SIZE (iv0->extend_mode))
550 iv1->extend_mode = iv0->extend_mode;
551 iv1->base = simplify_gen_unary (ZERO_EXTEND, iv1->extend_mode,
552 iv1->base, iv1->mode);
555 mode = iv0->extend_mode;
556 if (mode != iv1->extend_mode)
557 return false;
559 if (iv0->extend == IV_UNKNOWN_EXTEND
560 && iv1->extend == IV_UNKNOWN_EXTEND)
562 if (iv0->mode != iv1->mode)
563 return false;
565 iv0->base = simplify_gen_binary (op, mode, iv0->base, iv1->base);
566 iv0->step = simplify_gen_binary (op, mode, iv0->step, iv1->step);
568 return true;
571 /* Handle addition of constant. */
572 if (iv1->extend == IV_UNKNOWN_EXTEND
573 && iv1->mode == mode
574 && iv1->step == const0_rtx)
576 iv0->delta = simplify_gen_binary (op, mode, iv0->delta, iv1->base);
577 return true;
580 if (iv0->extend == IV_UNKNOWN_EXTEND
581 && iv0->mode == mode
582 && iv0->step == const0_rtx)
584 arg = iv0->base;
585 *iv0 = *iv1;
586 if (op == MINUS
587 && !iv_neg (iv0))
588 return false;
590 iv0->delta = simplify_gen_binary (PLUS, mode, iv0->delta, arg);
591 return true;
594 return false;
597 /* Evaluates multiplication of IV by constant CST. */
599 static bool
600 iv_mult (struct rtx_iv *iv, rtx mby)
602 machine_mode mode = iv->extend_mode;
604 if (GET_MODE (mby) != VOIDmode
605 && GET_MODE (mby) != mode)
606 return false;
608 if (iv->extend == IV_UNKNOWN_EXTEND)
610 iv->base = simplify_gen_binary (MULT, mode, iv->base, mby);
611 iv->step = simplify_gen_binary (MULT, mode, iv->step, mby);
613 else
615 iv->delta = simplify_gen_binary (MULT, mode, iv->delta, mby);
616 iv->mult = simplify_gen_binary (MULT, mode, iv->mult, mby);
619 return true;
622 /* Evaluates shift of IV by constant CST. */
624 static bool
625 iv_shift (struct rtx_iv *iv, rtx mby)
627 machine_mode mode = iv->extend_mode;
629 if (GET_MODE (mby) != VOIDmode
630 && GET_MODE (mby) != mode)
631 return false;
633 if (iv->extend == IV_UNKNOWN_EXTEND)
635 iv->base = simplify_gen_binary (ASHIFT, mode, iv->base, mby);
636 iv->step = simplify_gen_binary (ASHIFT, mode, iv->step, mby);
638 else
640 iv->delta = simplify_gen_binary (ASHIFT, mode, iv->delta, mby);
641 iv->mult = simplify_gen_binary (ASHIFT, mode, iv->mult, mby);
644 return true;
647 /* The recursive part of get_biv_step. Gets the value of the single value
648 defined by DEF with respect to the initial value of REG inside the loop,
649 in the shape described at get_biv_step. */
651 static bool
652 get_biv_step_1 (df_ref def, rtx reg,
653 rtx *inner_step, machine_mode *inner_mode,
654 enum iv_extend_code *extend, machine_mode outer_mode,
655 rtx *outer_step)
657 rtx set, rhs, op0 = NULL_RTX, op1 = NULL_RTX;
658 rtx next, nextr;
659 enum rtx_code code;
660 rtx_insn *insn = DF_REF_INSN (def);
661 df_ref next_def;
662 enum iv_grd_result res;
664 set = single_set (insn);
665 if (!set)
666 return false;
668 rhs = find_reg_equal_equiv_note (insn);
669 if (rhs)
670 rhs = XEXP (rhs, 0);
671 else
672 rhs = SET_SRC (set);
674 code = GET_CODE (rhs);
675 switch (code)
677 case SUBREG:
678 case REG:
679 next = rhs;
680 break;
682 case PLUS:
683 case MINUS:
684 op0 = XEXP (rhs, 0);
685 op1 = XEXP (rhs, 1);
687 if (code == PLUS && CONSTANT_P (op0))
688 std::swap (op0, op1);
690 if (!simple_reg_p (op0)
691 || !CONSTANT_P (op1))
692 return false;
694 if (GET_MODE (rhs) != outer_mode)
696 /* ppc64 uses expressions like
698 (set x:SI (plus:SI (subreg:SI y:DI) 1)).
700 This is equivalent to
702 (set x':DI (plus:DI y:DI 1))
703 (set x:SI (subreg:SI x':DI)). */
704 if (GET_CODE (op0) != SUBREG)
705 return false;
706 if (GET_MODE (SUBREG_REG (op0)) != outer_mode)
707 return false;
710 next = op0;
711 break;
713 case SIGN_EXTEND:
714 case ZERO_EXTEND:
715 if (GET_MODE (rhs) != outer_mode)
716 return false;
718 op0 = XEXP (rhs, 0);
719 if (!simple_reg_p (op0))
720 return false;
722 next = op0;
723 break;
725 default:
726 return false;
729 if (GET_CODE (next) == SUBREG)
731 if (!subreg_lowpart_p (next))
732 return false;
734 nextr = SUBREG_REG (next);
735 if (GET_MODE (nextr) != outer_mode)
736 return false;
738 else
739 nextr = next;
741 res = iv_get_reaching_def (insn, nextr, &next_def);
743 if (res == GRD_INVALID || res == GRD_INVARIANT)
744 return false;
746 if (res == GRD_MAYBE_BIV)
748 if (!rtx_equal_p (nextr, reg))
749 return false;
751 *inner_step = const0_rtx;
752 *extend = IV_UNKNOWN_EXTEND;
753 *inner_mode = outer_mode;
754 *outer_step = const0_rtx;
756 else if (!get_biv_step_1 (next_def, reg,
757 inner_step, inner_mode, extend, outer_mode,
758 outer_step))
759 return false;
761 if (GET_CODE (next) == SUBREG)
763 machine_mode amode = GET_MODE (next);
765 if (GET_MODE_SIZE (amode) > GET_MODE_SIZE (*inner_mode))
766 return false;
768 *inner_mode = amode;
769 *inner_step = simplify_gen_binary (PLUS, outer_mode,
770 *inner_step, *outer_step);
771 *outer_step = const0_rtx;
772 *extend = IV_UNKNOWN_EXTEND;
775 switch (code)
777 case REG:
778 case SUBREG:
779 break;
781 case PLUS:
782 case MINUS:
783 if (*inner_mode == outer_mode
784 /* See comment in previous switch. */
785 || GET_MODE (rhs) != outer_mode)
786 *inner_step = simplify_gen_binary (code, outer_mode,
787 *inner_step, op1);
788 else
789 *outer_step = simplify_gen_binary (code, outer_mode,
790 *outer_step, op1);
791 break;
793 case SIGN_EXTEND:
794 case ZERO_EXTEND:
795 gcc_assert (GET_MODE (op0) == *inner_mode
796 && *extend == IV_UNKNOWN_EXTEND
797 && *outer_step == const0_rtx);
799 *extend = (code == SIGN_EXTEND) ? IV_SIGN_EXTEND : IV_ZERO_EXTEND;
800 break;
802 default:
803 return false;
806 return true;
809 /* Gets the operation on register REG inside the loop, in the shape
811 OUTER_STEP + EXTEND_{OUTER_MODE} (SUBREG_{INNER_MODE} (REG + INNER_STEP))
813 If the operation cannot be described in this shape, return false.
814 LAST_DEF is the definition of REG that dominates loop latch. */
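/* A sketch of the shape above on a hypothetical 64-bit target: for a DImode
   pseudo x updated once per iteration by the insn pair

       (set tmp:SI (plus:SI (subreg:SI x:DI 0) (const_int 1)))
       (set x:DI (zero_extend:DI tmp:SI))

   this returns *inner_mode = SImode, *inner_step = const_int 1,
   *outer_step = const0_rtx and *extend = IV_ZERO_EXTEND, with
   *outer_mode = DImode.  */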
816 static bool
817 get_biv_step (df_ref last_def, rtx reg, rtx *inner_step,
818 machine_mode *inner_mode, enum iv_extend_code *extend,
819 machine_mode *outer_mode, rtx *outer_step)
821 *outer_mode = GET_MODE (reg);
823 if (!get_biv_step_1 (last_def, reg,
824 inner_step, inner_mode, extend, *outer_mode,
825 outer_step))
826 return false;
828 gcc_assert ((*inner_mode == *outer_mode) != (*extend != IV_UNKNOWN_EXTEND));
829 gcc_assert (*inner_mode != *outer_mode || *outer_step == const0_rtx);
831 return true;
834 /* Records information that DEF is induction variable IV. */
836 static void
837 record_iv (df_ref def, struct rtx_iv *iv)
839 struct rtx_iv *recorded_iv = XNEW (struct rtx_iv);
841 *recorded_iv = *iv;
842 check_iv_ref_table_size ();
843 DF_REF_IV_SET (def, recorded_iv);
846 /* If DEF was already analyzed for bivness, store the description of the biv to
847 IV and return true. Otherwise return false. */
849 static bool
850 analyzed_for_bivness_p (rtx def, struct rtx_iv *iv)
852 struct biv_entry *biv = bivs->find_with_hash (def, REGNO (def));
854 if (!biv)
855 return false;
857 *iv = biv->iv;
858 return true;
861 static void
862 record_biv (rtx def, struct rtx_iv *iv)
864 struct biv_entry *biv = XNEW (struct biv_entry);
865 biv_entry **slot = bivs->find_slot_with_hash (def, REGNO (def), INSERT);
867 biv->regno = REGNO (def);
868 biv->iv = *iv;
869 gcc_assert (!*slot);
870 *slot = biv;
873 /* Determines whether DEF is a biv and if so, stores its description
874 to *IV. */
876 static bool
877 iv_analyze_biv (rtx def, struct rtx_iv *iv)
879 rtx inner_step, outer_step;
880 machine_mode inner_mode, outer_mode;
881 enum iv_extend_code extend;
882 df_ref last_def;
884 if (dump_file)
886 fprintf (dump_file, "Analyzing ");
887 print_rtl (dump_file, def);
888 fprintf (dump_file, " for bivness.\n");
891 if (!REG_P (def))
893 if (!CONSTANT_P (def))
894 return false;
896 return iv_constant (iv, def, VOIDmode);
899 if (!latch_dominating_def (def, &last_def))
901 if (dump_file)
902 fprintf (dump_file, " not simple.\n");
903 return false;
906 if (!last_def)
907 return iv_constant (iv, def, VOIDmode);
909 if (analyzed_for_bivness_p (def, iv))
911 if (dump_file)
912 fprintf (dump_file, " already analysed.\n");
913 return iv->base != NULL_RTX;
916 if (!get_biv_step (last_def, def, &inner_step, &inner_mode, &extend,
917 &outer_mode, &outer_step))
919 iv->base = NULL_RTX;
920 goto end;
923 /* The loop transforms base to es (base + inner_step) + outer_step,
924 where es means the extend of a subreg between inner_mode and outer_mode.
925 The corresponding induction variable is
927 es ((base - outer_step) + i * (inner_step + outer_step)) + outer_step */
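  /* A worked instance (continuing the zero-extend sketch at get_biv_step):
     with inner_step = 1, outer_step = 0 and a zero extend from SImode to
     DImode, the value at iteration i is zero_extend:DI (subreg:SI (base + i)),
     so below iv->base = def, iv->step = const_int 1, iv->delta = 0 and
     iv->mult = 1.  */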
929 iv->base = simplify_gen_binary (MINUS, outer_mode, def, outer_step);
930 iv->step = simplify_gen_binary (PLUS, outer_mode, inner_step, outer_step);
931 iv->mode = inner_mode;
932 iv->extend_mode = outer_mode;
933 iv->extend = extend;
934 iv->mult = const1_rtx;
935 iv->delta = outer_step;
936 iv->first_special = inner_mode != outer_mode;
938 end:
939 if (dump_file)
941 fprintf (dump_file, " ");
942 dump_iv_info (dump_file, iv);
943 fprintf (dump_file, "\n");
946 record_biv (def, iv);
947 return iv->base != NULL_RTX;
950 /* Analyzes expression RHS used at INSN and stores the result to *IV.
951 The mode of the induction variable is MODE. */
953 bool
954 iv_analyze_expr (rtx_insn *insn, rtx rhs, machine_mode mode,
955 struct rtx_iv *iv)
957 rtx mby = NULL_RTX;
958 rtx op0 = NULL_RTX, op1 = NULL_RTX;
959 struct rtx_iv iv0, iv1;
960 enum rtx_code code = GET_CODE (rhs);
961 machine_mode omode = mode;
963 iv->mode = VOIDmode;
964 iv->base = NULL_RTX;
965 iv->step = NULL_RTX;
967 gcc_assert (GET_MODE (rhs) == mode || GET_MODE (rhs) == VOIDmode);
969 if (CONSTANT_P (rhs)
970 || REG_P (rhs)
971 || code == SUBREG)
973 if (!iv_analyze_op (insn, rhs, iv))
974 return false;
976 if (iv->mode == VOIDmode)
978 iv->mode = mode;
979 iv->extend_mode = mode;
982 return true;
985 switch (code)
987 case REG:
988 op0 = rhs;
989 break;
991 case SIGN_EXTEND:
992 case ZERO_EXTEND:
993 case NEG:
994 op0 = XEXP (rhs, 0);
995 omode = GET_MODE (op0);
996 break;
998 case PLUS:
999 case MINUS:
1000 op0 = XEXP (rhs, 0);
1001 op1 = XEXP (rhs, 1);
1002 break;
1004 case MULT:
1005 op0 = XEXP (rhs, 0);
1006 mby = XEXP (rhs, 1);
1007 if (!CONSTANT_P (mby))
1008 std::swap (op0, mby);
1009 if (!CONSTANT_P (mby))
1010 return false;
1011 break;
1013 case ASHIFT:
1014 op0 = XEXP (rhs, 0);
1015 mby = XEXP (rhs, 1);
1016 if (!CONSTANT_P (mby))
1017 return false;
1018 break;
1020 default:
1021 return false;
1024 if (op0
1025 && !iv_analyze_expr (insn, op0, omode, &iv0))
1026 return false;
1028 if (op1
1029 && !iv_analyze_expr (insn, op1, omode, &iv1))
1030 return false;
1032 switch (code)
1034 case SIGN_EXTEND:
1035 if (!iv_extend (&iv0, IV_SIGN_EXTEND, mode))
1036 return false;
1037 break;
1039 case ZERO_EXTEND:
1040 if (!iv_extend (&iv0, IV_ZERO_EXTEND, mode))
1041 return false;
1042 break;
1044 case NEG:
1045 if (!iv_neg (&iv0))
1046 return false;
1047 break;
1049 case PLUS:
1050 case MINUS:
1051 if (!iv_add (&iv0, &iv1, code))
1052 return false;
1053 break;
1055 case MULT:
1056 if (!iv_mult (&iv0, mby))
1057 return false;
1058 break;
1060 case ASHIFT:
1061 if (!iv_shift (&iv0, mby))
1062 return false;
1063 break;
1065 default:
1066 break;
1069 *iv = iv0;
1070 return iv->base != NULL_RTX;
1073 /* Analyzes iv DEF and stores the result to *IV. */
1075 static bool
1076 iv_analyze_def (df_ref def, struct rtx_iv *iv)
1078 rtx_insn *insn = DF_REF_INSN (def);
1079 rtx reg = DF_REF_REG (def);
1080 rtx set, rhs;
1082 if (dump_file)
1084 fprintf (dump_file, "Analyzing def of ");
1085 print_rtl (dump_file, reg);
1086 fprintf (dump_file, " in insn ");
1087 print_rtl_single (dump_file, insn);
1090 check_iv_ref_table_size ();
1091 if (DF_REF_IV (def))
1093 if (dump_file)
1094 fprintf (dump_file, " already analysed.\n");
1095 *iv = *DF_REF_IV (def);
1096 return iv->base != NULL_RTX;
1099 iv->mode = VOIDmode;
1100 iv->base = NULL_RTX;
1101 iv->step = NULL_RTX;
1103 if (!REG_P (reg))
1104 return false;
1106 set = single_set (insn);
1107 if (!set)
1108 return false;
1110 if (!REG_P (SET_DEST (set)))
1111 return false;
1113 gcc_assert (SET_DEST (set) == reg);
1114 rhs = find_reg_equal_equiv_note (insn);
1115 if (rhs)
1116 rhs = XEXP (rhs, 0);
1117 else
1118 rhs = SET_SRC (set);
1120 iv_analyze_expr (insn, rhs, GET_MODE (reg), iv);
1121 record_iv (def, iv);
1123 if (dump_file)
1125 print_rtl (dump_file, reg);
1126 fprintf (dump_file, " in insn ");
1127 print_rtl_single (dump_file, insn);
1128 fprintf (dump_file, " is ");
1129 dump_iv_info (dump_file, iv);
1130 fprintf (dump_file, "\n");
1133 return iv->base != NULL_RTX;
1136 /* Analyzes operand OP of INSN and stores the result to *IV. */
1138 static bool
1139 iv_analyze_op (rtx_insn *insn, rtx op, struct rtx_iv *iv)
1141 df_ref def = NULL;
1142 enum iv_grd_result res;
1144 if (dump_file)
1146 fprintf (dump_file, "Analyzing operand ");
1147 print_rtl (dump_file, op);
1148 fprintf (dump_file, " of insn ");
1149 print_rtl_single (dump_file, insn);
1152 if (function_invariant_p (op))
1153 res = GRD_INVARIANT;
1154 else if (GET_CODE (op) == SUBREG)
1156 if (!subreg_lowpart_p (op))
1157 return false;
1159 if (!iv_analyze_op (insn, SUBREG_REG (op), iv))
1160 return false;
1162 return iv_subreg (iv, GET_MODE (op));
1164 else
1166 res = iv_get_reaching_def (insn, op, &def);
1167 if (res == GRD_INVALID)
1169 if (dump_file)
1170 fprintf (dump_file, " not simple.\n");
1171 return false;
1175 if (res == GRD_INVARIANT)
1177 iv_constant (iv, op, VOIDmode);
1179 if (dump_file)
1181 fprintf (dump_file, " ");
1182 dump_iv_info (dump_file, iv);
1183 fprintf (dump_file, "\n");
1185 return true;
1188 if (res == GRD_MAYBE_BIV)
1189 return iv_analyze_biv (op, iv);
1191 return iv_analyze_def (def, iv);
1194 /* Analyzes value VAL at INSN and stores the result to *IV. */
1196 bool
1197 iv_analyze (rtx_insn *insn, rtx val, struct rtx_iv *iv)
1199 rtx reg;
1201 /* We must find the insn in which VAL is used, so that we get to UD chains.
1202 Since the function is sometimes called on result of get_condition,
1203 this does not necessarily have to be directly INSN; scan also the
1204 following insns. */
1205 if (simple_reg_p (val))
1207 if (GET_CODE (val) == SUBREG)
1208 reg = SUBREG_REG (val);
1209 else
1210 reg = val;
1212 while (!df_find_use (insn, reg))
1213 insn = NEXT_INSN (insn);
1216 return iv_analyze_op (insn, val, iv);
1219 /* Analyzes definition of DEF in INSN and stores the result to IV. */
1221 bool
1222 iv_analyze_result (rtx_insn *insn, rtx def, struct rtx_iv *iv)
1224 df_ref adef;
1226 adef = df_find_def (insn, def);
1227 if (!adef)
1228 return false;
1230 return iv_analyze_def (adef, iv);
1233 /* Checks whether definition of register REG in INSN is a basic induction
1234 variable. IV analysis must have been initialized (via a call to
1235 iv_analysis_loop_init) for this function to produce a result. */
1237 bool
1238 biv_p (rtx_insn *insn, rtx reg)
1240 struct rtx_iv iv;
1241 df_ref def, last_def;
1243 if (!simple_reg_p (reg))
1244 return false;
1246 def = df_find_def (insn, reg);
1247 gcc_assert (def != NULL);
1248 if (!latch_dominating_def (reg, &last_def))
1249 return false;
1250 if (last_def != def)
1251 return false;
1253 if (!iv_analyze_biv (reg, &iv))
1254 return false;
1256 return iv.step != const0_rtx;
1259 /* Calculates value of IV at ITERATION-th iteration. */
1262 get_iv_value (struct rtx_iv *iv, rtx iteration)
1264 rtx val;
1266 /* We would need to generate some if_then_else patterns, and so far
1267 it is not needed anywhere. */
1268 gcc_assert (!iv->first_special);
1270 if (iv->step != const0_rtx && iteration != const0_rtx)
1271 val = simplify_gen_binary (PLUS, iv->extend_mode, iv->base,
1272 simplify_gen_binary (MULT, iv->extend_mode,
1273 iv->step, iteration));
1274 else
1275 val = iv->base;
1277 if (iv->extend_mode == iv->mode)
1278 return val;
1280 val = lowpart_subreg (iv->mode, val, iv->extend_mode);
1282 if (iv->extend == IV_UNKNOWN_EXTEND)
1283 return val;
1285 val = simplify_gen_unary (iv_extend_to_rtx_code (iv->extend),
1286 iv->extend_mode, val, iv->mode);
1287 val = simplify_gen_binary (PLUS, iv->extend_mode, iv->delta,
1288 simplify_gen_binary (MULT, iv->extend_mode,
1289 iv->mult, val));
1291 return val;
1294 /* Free the data for an induction variable analysis. */
1296 void
1297 iv_analysis_done (void)
1299 if (!clean_slate)
1301 clear_iv_info ();
1302 clean_slate = true;
1303 df_finish_pass (true);
1304 delete bivs;
1305 bivs = NULL;
1306 free (iv_ref_table);
1307 iv_ref_table = NULL;
1308 iv_ref_table_size = 0;
1312 /* Computes the inverse of X modulo (1 << MOD). */
1314 static uint64_t
1315 inverse (uint64_t x, int mod)
1317 uint64_t mask =
1318 ((uint64_t) 1 << (mod - 1) << 1) - 1;
1319 uint64_t rslt = 1;
1320 int i;
1322 for (i = 0; i < mod - 1; i++)
1324 rslt = (rslt * x) & mask;
1325 x = (x * x) & mask;
1328 return rslt;
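/* A short sketch of why the loop above works: it computes
   x^(2^(mod-1) - 1) modulo 2^mod by repeated squaring, and for odd x this
   equals the inverse, because for mod >= 3 the order of any odd residue
   modulo 2^mod divides 2^(mod-2) (the smaller cases check directly).
   For example, inverse (3, 4) yields 11, and indeed 3 * 11 == 33 == 1
   modulo 16.  */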
1331 /* Checks whether any register in X is in set ALT. */
1333 static bool
1334 altered_reg_used (const_rtx x, bitmap alt)
1336 subrtx_iterator::array_type array;
1337 FOR_EACH_SUBRTX (iter, array, x, NONCONST)
1339 const_rtx x = *iter;
1340 if (REG_P (x) && REGNO_REG_SET_P (alt, REGNO (x)))
1341 return true;
1343 return false;
1346 /* Marks registers altered by EXPR in set ALT. */
1348 static void
1349 mark_altered (rtx expr, const_rtx by ATTRIBUTE_UNUSED, void *alt)
1351 if (GET_CODE (expr) == SUBREG)
1352 expr = SUBREG_REG (expr);
1353 if (!REG_P (expr))
1354 return;
1356 SET_REGNO_REG_SET ((bitmap) alt, REGNO (expr));
1359 /* Checks whether RHS is simple enough to process. */
1361 static bool
1362 simple_rhs_p (rtx rhs)
1364 rtx op0, op1;
1366 if (function_invariant_p (rhs)
1367 || (REG_P (rhs) && !HARD_REGISTER_P (rhs)))
1368 return true;
1370 switch (GET_CODE (rhs))
1372 case PLUS:
1373 case MINUS:
1374 case AND:
1375 op0 = XEXP (rhs, 0);
1376 op1 = XEXP (rhs, 1);
1377 /* Allow reg OP const and reg OP reg. */
1378 if (!(REG_P (op0) && !HARD_REGISTER_P (op0))
1379 && !function_invariant_p (op0))
1380 return false;
1381 if (!(REG_P (op1) && !HARD_REGISTER_P (op1))
1382 && !function_invariant_p (op1))
1383 return false;
1385 return true;
1387 case ASHIFT:
1388 case ASHIFTRT:
1389 case LSHIFTRT:
1390 case MULT:
1391 op0 = XEXP (rhs, 0);
1392 op1 = XEXP (rhs, 1);
1393 /* Allow reg OP const. */
1394 if (!(REG_P (op0) && !HARD_REGISTER_P (op0)))
1395 return false;
1396 if (!function_invariant_p (op1))
1397 return false;
1399 return true;
1401 default:
1402 return false;
1406 /* If REGNO has a single definition, return its known value, otherwise return
1407 null. */
1409 static rtx
1410 find_single_def_src (unsigned int regno)
1412 df_ref adef;
1413 rtx set, src;
1415 for (;;)
1417 rtx note;
1418 adef = DF_REG_DEF_CHAIN (regno);
1419 if (adef == NULL || DF_REF_NEXT_REG (adef) != NULL
1420 || DF_REF_IS_ARTIFICIAL (adef))
1421 return NULL_RTX;
1423 set = single_set (DF_REF_INSN (adef));
1424 if (set == NULL || !REG_P (SET_DEST (set))
1425 || REGNO (SET_DEST (set)) != regno)
1426 return NULL_RTX;
1428 note = find_reg_equal_equiv_note (DF_REF_INSN (adef));
1430 if (note && function_invariant_p (XEXP (note, 0)))
1432 src = XEXP (note, 0);
1433 break;
1435 src = SET_SRC (set);
1437 if (REG_P (src))
1439 regno = REGNO (src);
1440 continue;
1442 break;
1444 if (!function_invariant_p (src))
1445 return NULL_RTX;
1447 return src;
1450 /* If any registers in *EXPR have a single definition, try to replace
1451 them with their known-equivalent values. */
1453 static void
1454 replace_single_def_regs (rtx *expr)
1456 subrtx_var_iterator::array_type array;
1457 repeat:
1458 FOR_EACH_SUBRTX_VAR (iter, array, *expr, NONCONST)
1460 rtx x = *iter;
1461 if (REG_P (x))
1462 if (rtx new_x = find_single_def_src (REGNO (x)))
1464 *expr = simplify_replace_rtx (*expr, x, new_x);
1465 goto repeat;
1470 /* A subroutine of simplify_using_initial_values, this function examines INSN
1471 to see if it contains a suitable set that we can use to make a replacement.
1472 If it is suitable, return true and set DEST and SRC to the lhs and rhs of
1473 the set; return false otherwise. */
1475 static bool
1476 suitable_set_for_replacement (rtx_insn *insn, rtx *dest, rtx *src)
1478 rtx set = single_set (insn);
1479 rtx lhs = NULL_RTX, rhs;
1481 if (!set)
1482 return false;
1484 lhs = SET_DEST (set);
1485 if (!REG_P (lhs))
1486 return false;
1488 rhs = find_reg_equal_equiv_note (insn);
1489 if (rhs)
1490 rhs = XEXP (rhs, 0);
1491 else
1492 rhs = SET_SRC (set);
1494 if (!simple_rhs_p (rhs))
1495 return false;
1497 *dest = lhs;
1498 *src = rhs;
1499 return true;
1502 /* Using the data returned by suitable_set_for_replacement, replace DEST
1503 with SRC in *EXPR and return the new expression. Also call
1504 replace_single_def_regs if the replacement changed something. */
1505 static void
1506 replace_in_expr (rtx *expr, rtx dest, rtx src)
1508 rtx old = *expr;
1509 *expr = simplify_replace_rtx (*expr, dest, src);
1510 if (old == *expr)
1511 return;
1512 replace_single_def_regs (expr);
1515 /* Checks whether A implies B. */
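/* A few illustrative instances of the rules implemented below: (eq x c),
   with x a register, implies any condition that simplifies to true once c
   is substituted for x; (lt a b) implies (le (plus a 1) b); and, for
   unsigned values, (ne a 0) implies (gtu a 0).  */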
1517 static bool
1518 implies_p (rtx a, rtx b)
1520 rtx op0, op1, opb0, opb1;
1521 machine_mode mode;
1523 if (rtx_equal_p (a, b))
1524 return true;
1526 if (GET_CODE (a) == EQ)
1528 op0 = XEXP (a, 0);
1529 op1 = XEXP (a, 1);
1531 if (REG_P (op0)
1532 || (GET_CODE (op0) == SUBREG
1533 && REG_P (SUBREG_REG (op0))))
1535 rtx r = simplify_replace_rtx (b, op0, op1);
1536 if (r == const_true_rtx)
1537 return true;
1540 if (REG_P (op1)
1541 || (GET_CODE (op1) == SUBREG
1542 && REG_P (SUBREG_REG (op1))))
1544 rtx r = simplify_replace_rtx (b, op1, op0);
1545 if (r == const_true_rtx)
1546 return true;
1550 if (b == const_true_rtx)
1551 return true;
1553 if ((GET_RTX_CLASS (GET_CODE (a)) != RTX_COMM_COMPARE
1554 && GET_RTX_CLASS (GET_CODE (a)) != RTX_COMPARE)
1555 || (GET_RTX_CLASS (GET_CODE (b)) != RTX_COMM_COMPARE
1556 && GET_RTX_CLASS (GET_CODE (b)) != RTX_COMPARE))
1557 return false;
1559 op0 = XEXP (a, 0);
1560 op1 = XEXP (a, 1);
1561 opb0 = XEXP (b, 0);
1562 opb1 = XEXP (b, 1);
1564 mode = GET_MODE (op0);
1565 if (mode != GET_MODE (opb0))
1566 mode = VOIDmode;
1567 else if (mode == VOIDmode)
1569 mode = GET_MODE (op1);
1570 if (mode != GET_MODE (opb1))
1571 mode = VOIDmode;
1574 /* A < B implies A + 1 <= B. */
1575 if ((GET_CODE (a) == GT || GET_CODE (a) == LT)
1576 && (GET_CODE (b) == GE || GET_CODE (b) == LE))
1579 if (GET_CODE (a) == GT)
1580 std::swap (op0, op1);
1582 if (GET_CODE (b) == GE)
1583 std::swap (opb0, opb1);
1585 if (SCALAR_INT_MODE_P (mode)
1586 && rtx_equal_p (op1, opb1)
1587 && simplify_gen_binary (MINUS, mode, opb0, op0) == const1_rtx)
1588 return true;
1589 return false;
1592 /* A < B or A > B imply A != B. TODO: Likewise
1593 A + n < B implies A != B + n if neither wraps. */
1594 if (GET_CODE (b) == NE
1595 && (GET_CODE (a) == GT || GET_CODE (a) == GTU
1596 || GET_CODE (a) == LT || GET_CODE (a) == LTU))
1598 if (rtx_equal_p (op0, opb0)
1599 && rtx_equal_p (op1, opb1))
1600 return true;
1603 /* For unsigned comparisons, A != 0 implies A > 0 and A >= 1. */
1604 if (GET_CODE (a) == NE
1605 && op1 == const0_rtx)
1607 if ((GET_CODE (b) == GTU
1608 && opb1 == const0_rtx)
1609 || (GET_CODE (b) == GEU
1610 && opb1 == const1_rtx))
1611 return rtx_equal_p (op0, opb0);
1614 /* A != N is equivalent to A - (N + 1) <u -1. */
1615 if (GET_CODE (a) == NE
1616 && CONST_INT_P (op1)
1617 && GET_CODE (b) == LTU
1618 && opb1 == constm1_rtx
1619 && GET_CODE (opb0) == PLUS
1620 && CONST_INT_P (XEXP (opb0, 1))
1621 /* Avoid overflows. */
1622 && ((unsigned HOST_WIDE_INT) INTVAL (XEXP (opb0, 1))
1623 != ((unsigned HOST_WIDE_INT)1
1624 << (HOST_BITS_PER_WIDE_INT - 1)) - 1)
1625 && INTVAL (XEXP (opb0, 1)) + 1 == -INTVAL (op1))
1626 return rtx_equal_p (op0, XEXP (opb0, 0));
1628 /* Likewise, A != N implies A - N > 0. */
1629 if (GET_CODE (a) == NE
1630 && CONST_INT_P (op1))
1632 if (GET_CODE (b) == GTU
1633 && GET_CODE (opb0) == PLUS
1634 && opb1 == const0_rtx
1635 && CONST_INT_P (XEXP (opb0, 1))
1636 /* Avoid overflows. */
1637 && ((unsigned HOST_WIDE_INT) INTVAL (XEXP (opb0, 1))
1638 != ((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1)))
1639 && rtx_equal_p (XEXP (opb0, 0), op0))
1640 return INTVAL (op1) == -INTVAL (XEXP (opb0, 1));
1641 if (GET_CODE (b) == GEU
1642 && GET_CODE (opb0) == PLUS
1643 && opb1 == const1_rtx
1644 && CONST_INT_P (XEXP (opb0, 1))
1645 /* Avoid overflows. */
1646 && ((unsigned HOST_WIDE_INT) INTVAL (XEXP (opb0, 1))
1647 != ((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1)))
1648 && rtx_equal_p (XEXP (opb0, 0), op0))
1649 return INTVAL (op1) == -INTVAL (XEXP (opb0, 1));
1652 /* A >s X, where X is positive, implies A <u Y, if Y is negative. */
1653 if ((GET_CODE (a) == GT || GET_CODE (a) == GE)
1654 && CONST_INT_P (op1)
1655 && ((GET_CODE (a) == GT && op1 == constm1_rtx)
1656 || INTVAL (op1) >= 0)
1657 && GET_CODE (b) == LTU
1658 && CONST_INT_P (opb1)
1659 && rtx_equal_p (op0, opb0))
1660 return INTVAL (opb1) < 0;
1662 return false;
1665 /* Canonicalizes COND so that
1667 (1) operands are ordered according to
1668 swap_commutative_operands_p;
1669 (2) (LE x const) is replaced with (LT x <const+1>) and similarly
1670 for GE, GEU, and LEU. */
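/* Illustrative examples: (leu x (const_int 4)) is canonicalized to
   (ltu x (const_int 5)), and (geu x (const_int 4)) to (gtu x (const_int 3));
   the resulting comparison is always rebuilt in SImode.  */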
1673 canon_condition (rtx cond)
1675 rtx op0, op1;
1676 enum rtx_code code;
1677 machine_mode mode;
1679 code = GET_CODE (cond);
1680 op0 = XEXP (cond, 0);
1681 op1 = XEXP (cond, 1);
1683 if (swap_commutative_operands_p (op0, op1))
1685 code = swap_condition (code);
1686 std::swap (op0, op1);
1689 mode = GET_MODE (op0);
1690 if (mode == VOIDmode)
1691 mode = GET_MODE (op1);
1692 gcc_assert (mode != VOIDmode);
1694 if (CONST_SCALAR_INT_P (op1) && GET_MODE_CLASS (mode) != MODE_CC)
1696 rtx_mode_t const_val (op1, mode);
1698 switch (code)
1700 case LE:
1701 if (wi::ne_p (const_val, wi::max_value (mode, SIGNED)))
1703 code = LT;
1704 op1 = immed_wide_int_const (wi::add (const_val, 1), mode);
1706 break;
1708 case GE:
1709 if (wi::ne_p (const_val, wi::min_value (mode, SIGNED)))
1711 code = GT;
1712 op1 = immed_wide_int_const (wi::sub (const_val, 1), mode);
1714 break;
1716 case LEU:
1717 if (wi::ne_p (const_val, -1))
1719 code = LTU;
1720 op1 = immed_wide_int_const (wi::add (const_val, 1), mode);
1722 break;
1724 case GEU:
1725 if (wi::ne_p (const_val, 0))
1727 code = GTU;
1728 op1 = immed_wide_int_const (wi::sub (const_val, 1), mode);
1730 break;
1732 default:
1733 break;
1737 if (op0 != XEXP (cond, 0)
1738 || op1 != XEXP (cond, 1)
1739 || code != GET_CODE (cond)
1740 || GET_MODE (cond) != SImode)
1741 cond = gen_rtx_fmt_ee (code, SImode, op0, op1);
1743 return cond;
1746 /* Reverses CONDition; returns NULL if we cannot. */
1748 static rtx
1749 reversed_condition (rtx cond)
1751 enum rtx_code reversed;
1752 reversed = reversed_comparison_code (cond, NULL);
1753 if (reversed == UNKNOWN)
1754 return NULL_RTX;
1755 else
1756 return gen_rtx_fmt_ee (reversed,
1757 GET_MODE (cond), XEXP (cond, 0),
1758 XEXP (cond, 1));
1761 /* Tries to use the fact that COND holds to simplify EXPR. ALTERED is the
1762 set of altered regs. */
1764 void
1765 simplify_using_condition (rtx cond, rtx *expr, regset altered)
1767 rtx rev, reve, exp = *expr;
1769 /* If some register gets altered later, we do not really speak about its
1770 value at the time of comparison. */
1771 if (altered && altered_reg_used (cond, altered))
1772 return;
1774 if (GET_CODE (cond) == EQ
1775 && REG_P (XEXP (cond, 0)) && CONSTANT_P (XEXP (cond, 1)))
1777 *expr = simplify_replace_rtx (*expr, XEXP (cond, 0), XEXP (cond, 1));
1778 return;
1781 if (!COMPARISON_P (exp))
1782 return;
1784 rev = reversed_condition (cond);
1785 reve = reversed_condition (exp);
1787 cond = canon_condition (cond);
1788 exp = canon_condition (exp);
1789 if (rev)
1790 rev = canon_condition (rev);
1791 if (reve)
1792 reve = canon_condition (reve);
1794 if (rtx_equal_p (exp, cond))
1796 *expr = const_true_rtx;
1797 return;
1800 if (rev && rtx_equal_p (exp, rev))
1802 *expr = const0_rtx;
1803 return;
1806 if (implies_p (cond, exp))
1808 *expr = const_true_rtx;
1809 return;
1812 if (reve && implies_p (cond, reve))
1814 *expr = const0_rtx;
1815 return;
1818 /* A proof by contradiction. If *EXPR implies (not cond), *EXPR must
1819 be false. */
1820 if (rev && implies_p (exp, rev))
1822 *expr = const0_rtx;
1823 return;
1826 /* Similarly, If (not *EXPR) implies (not cond), *EXPR must be true. */
1827 if (rev && reve && implies_p (reve, rev))
1829 *expr = const_true_rtx;
1830 return;
1833 /* We would like to have some other tests here. TODO. */
1835 return;
1838 /* Use relationship between A and *B to eventually eliminate *B.
1839 OP is the operation we consider. */
1841 static void
1842 eliminate_implied_condition (enum rtx_code op, rtx a, rtx *b)
1844 switch (op)
1846 case AND:
1847 /* If A implies *B, we may replace *B by true. */
1848 if (implies_p (a, *b))
1849 *b = const_true_rtx;
1850 break;
1852 case IOR:
1853 /* If *B implies A, we may replace *B by false. */
1854 if (implies_p (*b, a))
1855 *b = const0_rtx;
1856 break;
1858 default:
1859 gcc_unreachable ();
1863 /* Eliminates the conditions in TAIL that are implied by HEAD. OP is the
1864 operation we consider. */
1866 static void
1867 eliminate_implied_conditions (enum rtx_code op, rtx *head, rtx tail)
1869 rtx elt;
1871 for (elt = tail; elt; elt = XEXP (elt, 1))
1872 eliminate_implied_condition (op, *head, &XEXP (elt, 0));
1873 for (elt = tail; elt; elt = XEXP (elt, 1))
1874 eliminate_implied_condition (op, XEXP (elt, 0), head);
1877 /* Simplifies *EXPR using initial values at the start of the LOOP. If *EXPR
1878 is a list, its elements are assumed to be combined using OP. */
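/* Illustrative note: with OP == AND the list is a conjunction, so an element
   simplified to const_true_rtx is dropped as neutral and a const0_rtx element
   collapses the whole list to false; with OP == IOR the roles are swapped.  */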
1880 static void
1881 simplify_using_initial_values (struct loop *loop, enum rtx_code op, rtx *expr)
1883 bool expression_valid;
1884 rtx head, tail, last_valid_expr;
1885 rtx_expr_list *cond_list;
1886 rtx_insn *insn;
1887 rtx neutral, aggr;
1888 regset altered, this_altered;
1889 edge e;
1891 if (!*expr)
1892 return;
1894 if (CONSTANT_P (*expr))
1895 return;
1897 if (GET_CODE (*expr) == EXPR_LIST)
1899 head = XEXP (*expr, 0);
1900 tail = XEXP (*expr, 1);
1902 eliminate_implied_conditions (op, &head, tail);
1904 switch (op)
1906 case AND:
1907 neutral = const_true_rtx;
1908 aggr = const0_rtx;
1909 break;
1911 case IOR:
1912 neutral = const0_rtx;
1913 aggr = const_true_rtx;
1914 break;
1916 default:
1917 gcc_unreachable ();
1920 simplify_using_initial_values (loop, UNKNOWN, &head);
1921 if (head == aggr)
1923 XEXP (*expr, 0) = aggr;
1924 XEXP (*expr, 1) = NULL_RTX;
1925 return;
1927 else if (head == neutral)
1929 *expr = tail;
1930 simplify_using_initial_values (loop, op, expr);
1931 return;
1933 simplify_using_initial_values (loop, op, &tail);
1935 if (tail && XEXP (tail, 0) == aggr)
1937 *expr = tail;
1938 return;
1941 XEXP (*expr, 0) = head;
1942 XEXP (*expr, 1) = tail;
1943 return;
1946 gcc_assert (op == UNKNOWN);
1948 replace_single_def_regs (expr);
1949 if (CONSTANT_P (*expr))
1950 return;
1952 e = loop_preheader_edge (loop);
1953 if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
1954 return;
1956 altered = ALLOC_REG_SET (&reg_obstack);
1957 this_altered = ALLOC_REG_SET (&reg_obstack);
1959 expression_valid = true;
1960 last_valid_expr = *expr;
1961 cond_list = NULL;
1962 while (1)
1964 insn = BB_END (e->src);
1965 if (any_condjump_p (insn))
1967 rtx cond = get_condition (BB_END (e->src), NULL, false, true);
1969 if (cond && (e->flags & EDGE_FALLTHRU))
1970 cond = reversed_condition (cond);
1971 if (cond)
1973 rtx old = *expr;
1974 simplify_using_condition (cond, expr, altered);
1975 if (old != *expr)
1977 rtx note;
1978 if (CONSTANT_P (*expr))
1979 goto out;
1980 for (note = cond_list; note; note = XEXP (note, 1))
1982 simplify_using_condition (XEXP (note, 0), expr, altered);
1983 if (CONSTANT_P (*expr))
1984 goto out;
1987 cond_list = alloc_EXPR_LIST (0, cond, cond_list);
1991 FOR_BB_INSNS_REVERSE (e->src, insn)
1993 rtx src, dest;
1994 rtx old = *expr;
1996 if (!INSN_P (insn))
1997 continue;
1999 CLEAR_REG_SET (this_altered);
2000 note_stores (PATTERN (insn), mark_altered, this_altered);
2001 if (CALL_P (insn))
2003 /* Kill all call clobbered registers. */
2004 unsigned int i;
2005 hard_reg_set_iterator hrsi;
2006 EXECUTE_IF_SET_IN_HARD_REG_SET (regs_invalidated_by_call,
2007 0, i, hrsi)
2008 SET_REGNO_REG_SET (this_altered, i);
2011 if (suitable_set_for_replacement (insn, &dest, &src))
2013 rtx_expr_list **pnote, **pnote_next;
2015 replace_in_expr (expr, dest, src);
2016 if (CONSTANT_P (*expr))
2017 goto out;
2019 for (pnote = &cond_list; *pnote; pnote = pnote_next)
2021 rtx_expr_list *note = *pnote;
2022 rtx old_cond = XEXP (note, 0);
2024 pnote_next = (rtx_expr_list **)&XEXP (note, 1);
2025 replace_in_expr (&XEXP (note, 0), dest, src);
2027 /* We can no longer use a condition that has been simplified
2028 to a constant, and simplify_using_condition will abort if
2029 we try. */
2030 if (CONSTANT_P (XEXP (note, 0)))
2032 *pnote = *pnote_next;
2033 pnote_next = pnote;
2034 free_EXPR_LIST_node (note);
2036 /* Retry simplifications with this condition if either the
2037 expression or the condition changed. */
2038 else if (old_cond != XEXP (note, 0) || old != *expr)
2039 simplify_using_condition (XEXP (note, 0), expr, altered);
2042 else
2044 rtx_expr_list **pnote, **pnote_next;
2046 /* If we did not use this insn to make a replacement, any overlap
2047 between stores in this insn and our expression will cause the
2048 expression to become invalid. */
2049 if (altered_reg_used (*expr, this_altered))
2050 goto out;
2052 /* Likewise for the conditions. */
2053 for (pnote = &cond_list; *pnote; pnote = pnote_next)
2055 rtx_expr_list *note = *pnote;
2056 rtx old_cond = XEXP (note, 0);
2058 pnote_next = (rtx_expr_list **)&XEXP (note, 1);
2059 if (altered_reg_used (old_cond, this_altered))
2061 *pnote = *pnote_next;
2062 pnote_next = pnote;
2063 free_EXPR_LIST_node (note);
2068 if (CONSTANT_P (*expr))
2069 goto out;
2071 IOR_REG_SET (altered, this_altered);
2073 /* If the expression now contains regs that have been altered, we
2074 can't return it to the caller. However, it is still valid for
2075 further simplification, so keep searching to see if we can
2076 eventually turn it into a constant. */
2077 if (altered_reg_used (*expr, altered))
2078 expression_valid = false;
2079 if (expression_valid)
2080 last_valid_expr = *expr;
2083 if (!single_pred_p (e->src)
2084 || single_pred (e->src) == ENTRY_BLOCK_PTR_FOR_FN (cfun))
2085 break;
2086 e = single_pred_edge (e->src);
2089 out:
2090 free_EXPR_LIST_list (&cond_list);
2091 if (!CONSTANT_P (*expr))
2092 *expr = last_valid_expr;
2093 FREE_REG_SET (altered);
2094 FREE_REG_SET (this_altered);
2097 /* Transforms invariant IV into MODE. Adds assumptions based on the fact
2098 that IV occurs as the left operand of comparison COND and its signedness
2099 is SIGNED_P to DESC. */
2101 static void
2102 shorten_into_mode (struct rtx_iv *iv, machine_mode mode,
2103 enum rtx_code cond, bool signed_p, struct niter_desc *desc)
2105 rtx mmin, mmax, cond_over, cond_under;
2107 get_mode_bounds (mode, signed_p, iv->extend_mode, &mmin, &mmax);
2108 cond_under = simplify_gen_relational (LT, SImode, iv->extend_mode,
2109 iv->base, mmin);
2110 cond_over = simplify_gen_relational (GT, SImode, iv->extend_mode,
2111 iv->base, mmax);
2113 switch (cond)
2115 case LE:
2116 case LT:
2117 case LEU:
2118 case LTU:
2119 if (cond_under != const0_rtx)
2120 desc->infinite =
2121 alloc_EXPR_LIST (0, cond_under, desc->infinite);
2122 if (cond_over != const0_rtx)
2123 desc->noloop_assumptions =
2124 alloc_EXPR_LIST (0, cond_over, desc->noloop_assumptions);
2125 break;
2127 case GE:
2128 case GT:
2129 case GEU:
2130 case GTU:
2131 if (cond_over != const0_rtx)
2132 desc->infinite =
2133 alloc_EXPR_LIST (0, cond_over, desc->infinite);
2134 if (cond_under != const0_rtx)
2135 desc->noloop_assumptions =
2136 alloc_EXPR_LIST (0, cond_under, desc->noloop_assumptions);
2137 break;
2139 case NE:
2140 if (cond_over != const0_rtx)
2141 desc->infinite =
2142 alloc_EXPR_LIST (0, cond_over, desc->infinite);
2143 if (cond_under != const0_rtx)
2144 desc->infinite =
2145 alloc_EXPR_LIST (0, cond_under, desc->infinite);
2146 break;
2148 default:
2149 gcc_unreachable ();
2152 iv->mode = mode;
2153 iv->extend = signed_p ? IV_SIGN_EXTEND : IV_ZERO_EXTEND;
2156 /* Transforms IV0 and IV1 compared by COND so that they are both compared as
2157 subregs of the same mode if possible (sometimes it is necessary to add
2158 some assumptions to DESC). */
2160 static bool
2161 canonicalize_iv_subregs (struct rtx_iv *iv0, struct rtx_iv *iv1,
2162 enum rtx_code cond, struct niter_desc *desc)
2164 machine_mode comp_mode;
2165 bool signed_p;
2167 /* If the ivs behave specially in the first iteration, or are
2168 added/multiplied after extending, we ignore them. */
2169 if (iv0->first_special || iv0->mult != const1_rtx || iv0->delta != const0_rtx)
2170 return false;
2171 if (iv1->first_special || iv1->mult != const1_rtx || iv1->delta != const0_rtx)
2172 return false;
2174 /* If there is some extend, it must match signedness of the comparison. */
2175 switch (cond)
2177 case LE:
2178 case LT:
2179 if (iv0->extend == IV_ZERO_EXTEND
2180 || iv1->extend == IV_ZERO_EXTEND)
2181 return false;
2182 signed_p = true;
2183 break;
2185 case LEU:
2186 case LTU:
2187 if (iv0->extend == IV_SIGN_EXTEND
2188 || iv1->extend == IV_SIGN_EXTEND)
2189 return false;
2190 signed_p = false;
2191 break;
2193 case NE:
2194 if (iv0->extend != IV_UNKNOWN_EXTEND
2195 && iv1->extend != IV_UNKNOWN_EXTEND
2196 && iv0->extend != iv1->extend)
2197 return false;
2199 signed_p = false;
2200 if (iv0->extend != IV_UNKNOWN_EXTEND)
2201 signed_p = iv0->extend == IV_SIGN_EXTEND;
2202 if (iv1->extend != IV_UNKNOWN_EXTEND)
2203 signed_p = iv1->extend == IV_SIGN_EXTEND;
2204 break;
2206 default:
2207 gcc_unreachable ();
2210 /* Values of both variables should be computed in the same mode. These
2211 might indeed be different, if we have a comparison like
2213 (compare (subreg:SI (iv0)) (subreg:SI (iv1)))
2215 and iv0 and iv1 are both ivs iterating in SI mode, but calculated
2216 in different modes. This does not seem impossible to handle, but
2217 it hardly ever occurs in practice.
2219 The only exception is the case when one of the operands is invariant.
2220 For example, Pentium 3 generates comparisons like
2221 (lt (subreg:HI (reg:SI)) 100). Here we assign HImode to 100, but we
2222 definitely do not want this to prevent the optimization. */
2223 comp_mode = iv0->extend_mode;
2224 if (GET_MODE_BITSIZE (comp_mode) < GET_MODE_BITSIZE (iv1->extend_mode))
2225 comp_mode = iv1->extend_mode;
2227 if (iv0->extend_mode != comp_mode)
2229 if (iv0->mode != iv0->extend_mode
2230 || iv0->step != const0_rtx)
2231 return false;
2233 iv0->base = simplify_gen_unary (signed_p ? SIGN_EXTEND : ZERO_EXTEND,
2234 comp_mode, iv0->base, iv0->mode);
2235 iv0->extend_mode = comp_mode;
2238 if (iv1->extend_mode != comp_mode)
2240 if (iv1->mode != iv1->extend_mode
2241 || iv1->step != const0_rtx)
2242 return false;
2244 iv1->base = simplify_gen_unary (signed_p ? SIGN_EXTEND : ZERO_EXTEND,
2245 comp_mode, iv1->base, iv1->mode);
2246 iv1->extend_mode = comp_mode;
2249 /* Check that both ivs belong to a range of a single mode. If one of the
2250 operands is an invariant, we may need to shorten it into the common
2251 mode. */
2252 if (iv0->mode == iv0->extend_mode
2253 && iv0->step == const0_rtx
2254 && iv0->mode != iv1->mode)
2255 shorten_into_mode (iv0, iv1->mode, cond, signed_p, desc);
2257 if (iv1->mode == iv1->extend_mode
2258 && iv1->step == const0_rtx
2259 && iv0->mode != iv1->mode)
2260 shorten_into_mode (iv1, iv0->mode, swap_condition (cond), signed_p, desc);
2262 if (iv0->mode != iv1->mode)
2263 return false;
2265 desc->mode = iv0->mode;
2266 desc->signed_p = signed_p;
2268 return true;
2271 /* Tries to estimate the maximum number of iterations in LOOP, and returns the
2272 result. This function is called from iv_number_of_iterations with
2273 a number of fields in DESC already filled in. OLD_NITER is the original
2274 expression for the number of iterations, before we tried to simplify it. */
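/* An illustrative instance: for an unsigned SImode iv whose niter_expr has
   the form (udiv ... (const_int 4)), the bound starts at 0xffffffff and is
   divided by 4 to 0x3fffffff; an outer (and ... (const_int 15)) would
   further clamp it to 15.  */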
2276 static uint64_t
2277 determine_max_iter (struct loop *loop, struct niter_desc *desc, rtx old_niter)
2279 rtx niter = desc->niter_expr;
2280 rtx mmin, mmax, cmp;
2281 uint64_t nmax, inc;
2282 uint64_t andmax = 0;
2284 /* We used to look for constant operand 0 of AND,
2285 but canonicalization should always make this impossible. */
2286 gcc_checking_assert (GET_CODE (niter) != AND
2287 || !CONST_INT_P (XEXP (niter, 0)));
2289 if (GET_CODE (niter) == AND
2290 && CONST_INT_P (XEXP (niter, 1)))
2292 andmax = UINTVAL (XEXP (niter, 1));
2293 niter = XEXP (niter, 0);
2296 get_mode_bounds (desc->mode, desc->signed_p, desc->mode, &mmin, &mmax);
2297 nmax = UINTVAL (mmax) - UINTVAL (mmin);
2299 if (GET_CODE (niter) == UDIV)
2301 if (!CONST_INT_P (XEXP (niter, 1)))
2302 return nmax;
2303 inc = INTVAL (XEXP (niter, 1));
2304 niter = XEXP (niter, 0);
2306 else
2307 inc = 1;
2309 /* We could use a binary search here, but for now improving the upper
2310 bound by just one eliminates one important corner case. */
2311 cmp = simplify_gen_relational (desc->signed_p ? LT : LTU, VOIDmode,
2312 desc->mode, old_niter, mmax);
2313 simplify_using_initial_values (loop, UNKNOWN, &cmp);
2314 if (cmp == const_true_rtx)
2316 nmax--;
2318 if (dump_file)
2319 fprintf (dump_file, ";; improved upper bound by one.\n");
2321 nmax /= inc;
2322 if (andmax)
2323 nmax = MIN (nmax, andmax);
2324 if (dump_file)
2325 fprintf (dump_file, ";; Determined upper bound %" PRId64".\n",
2326 nmax);
2327 return nmax;
2330 /* Computes number of iterations of the CONDITION in INSN in LOOP and stores
2331 the result into DESC. Very similar to determine_number_of_iterations
2332 (basically its rtl version), complicated by things like subregs. */
2334 static void
2335 iv_number_of_iterations (struct loop *loop, rtx_insn *insn, rtx condition,
2336 struct niter_desc *desc)
2338 rtx op0, op1, delta, step, bound, may_xform, tmp, tmp0, tmp1;
2339 struct rtx_iv iv0, iv1;
2340 rtx assumption, may_not_xform;
2341 enum rtx_code cond;
2342 machine_mode mode, comp_mode;
2343 rtx mmin, mmax, mode_mmin, mode_mmax;
2344 uint64_t s, size, d, inv, max;
2345 int64_t up, down, inc, step_val;
2346 int was_sharp = false;
2347 rtx old_niter;
2348 bool step_is_pow2;
2350 /* The meaning of these assumptions is this:
2351 if !assumptions
2352 then the rest of information does not have to be valid
2353 if noloop_assumptions then the loop does not roll
2354 if infinite then this exit is never used */
2356 desc->assumptions = NULL_RTX;
2357 desc->noloop_assumptions = NULL_RTX;
2358 desc->infinite = NULL_RTX;
2359 desc->simple_p = true;
2361 desc->const_iter = false;
2362 desc->niter_expr = NULL_RTX;
2364 cond = GET_CODE (condition);
2365 gcc_assert (COMPARISON_P (condition));
2367 mode = GET_MODE (XEXP (condition, 0));
2368 if (mode == VOIDmode)
2369 mode = GET_MODE (XEXP (condition, 1));
2370 /* The constant comparisons should be folded. */
2371 gcc_assert (mode != VOIDmode);
2373 /* We only handle integers or pointers. */
2374 if (GET_MODE_CLASS (mode) != MODE_INT
2375 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
2376 goto fail;
2378 op0 = XEXP (condition, 0);
2379 if (!iv_analyze (insn, op0, &iv0))
2380 goto fail;
2381 if (iv0.extend_mode == VOIDmode)
2382 iv0.mode = iv0.extend_mode = mode;
2384 op1 = XEXP (condition, 1);
2385 if (!iv_analyze (insn, op1, &iv1))
2386 goto fail;
2387 if (iv1.extend_mode == VOIDmode)
2388 iv1.mode = iv1.extend_mode = mode;
2390 if (GET_MODE_BITSIZE (iv0.extend_mode) > HOST_BITS_PER_WIDE_INT
2391 || GET_MODE_BITSIZE (iv1.extend_mode) > HOST_BITS_PER_WIDE_INT)
2392 goto fail;
2394 /* Check condition and normalize it. */
2396 switch (cond)
2398 case GE:
2399 case GT:
2400 case GEU:
2401 case GTU:
2402 std::swap (iv0, iv1);
2403 cond = swap_condition (cond);
2404 break;
2405 case NE:
2406 case LE:
2407 case LEU:
2408 case LT:
2409 case LTU:
2410 break;
2411 default:
2412 goto fail;
2415 /* Handle extends. This is relatively nontrivial, so we only try in some
2416 easy cases, when we can canonicalize the ivs (possibly by adding some
2417 assumptions) to shape subreg (base + i * step). This function also fills
2418 in desc->mode and desc->signed_p. */
2420 if (!canonicalize_iv_subregs (&iv0, &iv1, cond, desc))
2421 goto fail;
2423 comp_mode = iv0.extend_mode;
2424 mode = iv0.mode;
2425 size = GET_MODE_PRECISION (mode);
2426 get_mode_bounds (mode, (cond == LE || cond == LT), comp_mode, &mmin, &mmax);
2427 mode_mmin = lowpart_subreg (mode, mmin, comp_mode);
2428 mode_mmax = lowpart_subreg (mode, mmax, comp_mode);
2430 if (!CONST_INT_P (iv0.step) || !CONST_INT_P (iv1.step))
2431 goto fail;
2433 /* We can take care of the case of two induction variables chasing each other
2434 if the test is NE. I have never seen a loop using it, but still it is
2435 cool. */
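/* For instance (illustrative), something like
   for (i = 0, j = n; i != j; i++, j--)
   is handled by folding the two steps into one: below, iv0.step becomes
   1 - (-1) = 2 and iv1 is treated as the invariant n, so the NE counting
   code later divides by 2 (and records the odd-n case as infinite). */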
2436 if (iv0.step != const0_rtx && iv1.step != const0_rtx)
2438 if (cond != NE)
2439 goto fail;
2441 iv0.step = simplify_gen_binary (MINUS, comp_mode, iv0.step, iv1.step);
2442 iv1.step = const0_rtx;
2445 iv0.step = lowpart_subreg (mode, iv0.step, comp_mode);
2446 iv1.step = lowpart_subreg (mode, iv1.step, comp_mode);
2448 /* This is either an infinite loop or one that ends immediately, depending
2449 on the initial values. Unswitching should remove this kind of condition. */
2450 if (iv0.step == const0_rtx && iv1.step == const0_rtx)
2451 goto fail;
2453 if (cond != NE)
2455 if (iv0.step == const0_rtx)
2456 step_val = -INTVAL (iv1.step);
2457 else
2458 step_val = INTVAL (iv0.step);
2460 /* Ignore loops of while (i-- < 10) type. */
2461 if (step_val < 0)
2462 goto fail;
2464 step_is_pow2 = !(step_val & (step_val - 1));
2466 else
2468 /* We do not care about whether the step is a power of two in this
2469 case. */
2470 step_is_pow2 = false;
2471 step_val = 0;
2474 /* Some more condition normalization. We must record some assumptions
2475 due to overflows. */
2476 switch (cond)
2478 case LT:
2479 case LTU:
2480 /* We want to take care only of non-sharp relationals; this is easy,
2481 since in the cases where the overflow would make the transformation
2482 unsafe, the loop does not roll. Seemingly it would make more sense
2483 to handle sharp relationals instead, as NE is more similar to them,
2484 but the problem is that here the transformation would be more
2485 difficult due to possibly infinite loops. */
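/* Concretely (common case, not exhaustive): for unsigned i < n the code below
   rewrites the test as i <= n - 1 and records n == 0 (where n - 1 would wrap)
   in noloop_assumptions; for an invariant compared against a growing iv it
   instead bumps the invariant side by one and records the MAX case. */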
2486 if (iv0.step == const0_rtx)
2488 tmp = lowpart_subreg (mode, iv0.base, comp_mode);
2489 assumption = simplify_gen_relational (EQ, SImode, mode, tmp,
2490 mode_mmax);
2491 if (assumption == const_true_rtx)
2492 goto zero_iter_simplify;
2493 iv0.base = simplify_gen_binary (PLUS, comp_mode,
2494 iv0.base, const1_rtx);
2496 else
2498 tmp = lowpart_subreg (mode, iv1.base, comp_mode);
2499 assumption = simplify_gen_relational (EQ, SImode, mode, tmp,
2500 mode_mmin);
2501 if (assumption == const_true_rtx)
2502 goto zero_iter_simplify;
2503 iv1.base = simplify_gen_binary (PLUS, comp_mode,
2504 iv1.base, constm1_rtx);
2507 if (assumption != const0_rtx)
2508 desc->noloop_assumptions =
2509 alloc_EXPR_LIST (0, assumption, desc->noloop_assumptions);
2510 cond = (cond == LT) ? LE : LEU;
2512 /* It will be useful to be able to tell the difference once more in
2513 LE -> NE reduction. */
2514 was_sharp = true;
2515 break;
2516 default: ;
2519 /* Take care of trivially infinite loops. */
2520 if (cond != NE)
2522 if (iv0.step == const0_rtx)
2524 tmp = lowpart_subreg (mode, iv0.base, comp_mode);
2525 if (rtx_equal_p (tmp, mode_mmin))
2527 desc->infinite =
2528 alloc_EXPR_LIST (0, const_true_rtx, NULL_RTX);
2529 /* Fill in the remaining fields somehow. */
2530 goto zero_iter_simplify;
2533 else
2535 tmp = lowpart_subreg (mode, iv1.base, comp_mode);
2536 if (rtx_equal_p (tmp, mode_mmax))
2538 desc->infinite =
2539 alloc_EXPR_LIST (0, const_true_rtx, NULL_RTX);
2540 /* Fill in the remaining fields somehow. */
2541 goto zero_iter_simplify;
2546 /* If we can, we want to handle NE conditions instead of size
2547 comparisons, as they are much friendlier (most importantly,
2548 this takes care of the special handling of loops with step 1). We can
2549 do it if we first check that the upper bound is greater than or equal to
2550 the lower bound, that their difference modulo the step is a known
2551 constant c, and that there is no overflow. */
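/* A worked example (illustrative): for unsigned i, condition i <= b, step 4,
   starting at 0, and b % 4 == 2, the first value of i above b is b + 2, so
   the exit test can become i != b + 2; that is valid only if b + 2 does not
   wrap, i.e. b <= MAX - 2, which is exactly the may_xform test built from
   bound = MAX - step + delta below. */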
2552 if (cond != NE)
2554 if (iv0.step == const0_rtx)
2555 step = simplify_gen_unary (NEG, comp_mode, iv1.step, comp_mode);
2556 else
2557 step = iv0.step;
2558 step = lowpart_subreg (mode, step, comp_mode);
2559 delta = simplify_gen_binary (MINUS, comp_mode, iv1.base, iv0.base);
2560 delta = lowpart_subreg (mode, delta, comp_mode);
2561 delta = simplify_gen_binary (UMOD, mode, delta, step);
2562 may_xform = const0_rtx;
2563 may_not_xform = const_true_rtx;
2565 if (CONST_INT_P (delta))
2567 if (was_sharp && INTVAL (delta) == INTVAL (step) - 1)
2569 /* A special case. We have transformed a condition of the type
2570 for (i = 0; i < 4; i += 4)
2571 into
2572 for (i = 0; i <= 3; i += 4)
2573 Obviously, if the test for overflow during that transformation
2574 passed, we cannot overflow here. Most importantly, any
2575 loop with a sharp end condition and step 1 falls into this
2576 category, so handling this case specially is definitely
2577 worth the trouble. */
2578 may_xform = const_true_rtx;
2580 else if (iv0.step == const0_rtx)
2582 bound = simplify_gen_binary (PLUS, comp_mode, mmin, step);
2583 bound = simplify_gen_binary (MINUS, comp_mode, bound, delta);
2584 bound = lowpart_subreg (mode, bound, comp_mode);
2585 tmp = lowpart_subreg (mode, iv0.base, comp_mode);
2586 may_xform = simplify_gen_relational (cond, SImode, mode,
2587 bound, tmp);
2588 may_not_xform = simplify_gen_relational (reverse_condition (cond),
2589 SImode, mode,
2590 bound, tmp);
2592 else
2594 bound = simplify_gen_binary (MINUS, comp_mode, mmax, step);
2595 bound = simplify_gen_binary (PLUS, comp_mode, bound, delta);
2596 bound = lowpart_subreg (mode, bound, comp_mode);
2597 tmp = lowpart_subreg (mode, iv1.base, comp_mode);
2598 may_xform = simplify_gen_relational (cond, SImode, mode,
2599 tmp, bound);
2600 may_not_xform = simplify_gen_relational (reverse_condition (cond),
2601 SImode, mode,
2602 tmp, bound);
2606 if (may_xform != const0_rtx)
2608 /* We always perform the transformation, provided that it is not
2609 completely senseless. This is OK, as we would need this assumption
2610 to determine the number of iterations anyway. */
2611 if (may_xform != const_true_rtx)
2613 /* If the step is a power of two and the final value we have
2614 computed overflows, the cycle is infinite. Otherwise it
2615 is nontrivial to compute the number of iterations. */
2616 if (step_is_pow2)
2617 desc->infinite = alloc_EXPR_LIST (0, may_not_xform,
2618 desc->infinite);
2619 else
2620 desc->assumptions = alloc_EXPR_LIST (0, may_xform,
2621 desc->assumptions);
2624 /* We are going to lose some information about upper bound on
2625 number of iterations in this step, so record the information
2626 here. */
2627 inc = INTVAL (iv0.step) - INTVAL (iv1.step);
2628 if (CONST_INT_P (iv1.base))
2629 up = INTVAL (iv1.base);
2630 else
2631 up = INTVAL (mode_mmax) - inc;
2632 down = INTVAL (CONST_INT_P (iv0.base)
2633 ? iv0.base
2634 : mode_mmin);
2635 max = (uint64_t) (up - down) / inc + 1;
2636 if (!desc->infinite
2637 && !desc->assumptions)
2638 record_niter_bound (loop, max, false, true);
2640 if (iv0.step == const0_rtx)
2642 iv0.base = simplify_gen_binary (PLUS, comp_mode, iv0.base, delta);
2643 iv0.base = simplify_gen_binary (MINUS, comp_mode, iv0.base, step);
2645 else
2647 iv1.base = simplify_gen_binary (MINUS, comp_mode, iv1.base, delta);
2648 iv1.base = simplify_gen_binary (PLUS, comp_mode, iv1.base, step);
2651 tmp0 = lowpart_subreg (mode, iv0.base, comp_mode);
2652 tmp1 = lowpart_subreg (mode, iv1.base, comp_mode);
2653 assumption = simplify_gen_relational (reverse_condition (cond),
2654 SImode, mode, tmp0, tmp1);
2655 if (assumption == const_true_rtx)
2656 goto zero_iter_simplify;
2657 else if (assumption != const0_rtx)
2658 desc->noloop_assumptions =
2659 alloc_EXPR_LIST (0, assumption, desc->noloop_assumptions);
2660 cond = NE;
2664 /* Count the number of iterations. */
2665 if (cond == NE)
2667 /* Everything we do here is just arithmetic modulo the size of the mode.
2668 This lets us do more involved computations of the number of iterations
2669 than in the other cases. First transform the condition into the shape
2670 s * i <> c, with s positive. */
2671 iv1.base = simplify_gen_binary (MINUS, comp_mode, iv1.base, iv0.base);
2672 iv0.base = const0_rtx;
2673 iv0.step = simplify_gen_binary (MINUS, comp_mode, iv0.step, iv1.step);
2674 iv1.step = const0_rtx;
2675 if (INTVAL (iv0.step) < 0)
2677 iv0.step = simplify_gen_unary (NEG, comp_mode, iv0.step, comp_mode);
2678 iv1.base = simplify_gen_unary (NEG, comp_mode, iv1.base, comp_mode);
2680 iv0.step = lowpart_subreg (mode, iv0.step, comp_mode);
2682 /* Let gcd (s, size of mode) = d. If d does not divide c, the loop
2683 is infinite. Otherwise, the number of iterations is
2684 (inverse(s/d) * (c/d)) mod (size of mode/d). */
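/* A worked instance (illustrative): in an 8-bit mode with s = 6 and c = 10,
   d = 2, s/d = 3 and c/d = 5; the inverse of 3 modulo 2^7 is 43, so the
   number of iterations is (43 * 5) & 127 = 87, and indeed 6 * 87 = 522,
   which is 10 modulo 256. Had c been odd, d = 2 would not divide it and
   the loop would be infinite. */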
2685 s = INTVAL (iv0.step); d = 1;
2686 while (s % 2 != 1)
2688 s /= 2;
2689 d *= 2;
2690 size--;
2692 bound = GEN_INT (((uint64_t) 1 << (size - 1) << 1) - 1);
2694 tmp1 = lowpart_subreg (mode, iv1.base, comp_mode);
2695 tmp = simplify_gen_binary (UMOD, mode, tmp1, gen_int_mode (d, mode));
2696 assumption = simplify_gen_relational (NE, SImode, mode, tmp, const0_rtx);
2697 desc->infinite = alloc_EXPR_LIST (0, assumption, desc->infinite);
2699 tmp = simplify_gen_binary (UDIV, mode, tmp1, gen_int_mode (d, mode));
2700 inv = inverse (s, size);
2701 tmp = simplify_gen_binary (MULT, mode, tmp, gen_int_mode (inv, mode));
2702 desc->niter_expr = simplify_gen_binary (AND, mode, tmp, bound);
2704 else
2706 if (iv1.step == const0_rtx)
2707 /* Condition in the shape a + s * i <= b.
2708 We must know that b + s does not overflow and a <= b + s; then we
2709 can compute the number of iterations as (b + s - a) / s. (It might
2710 seem that we could in fact be more clever about testing the b + s
2711 overflow condition using some information about b - a mod s,
2712 but that was already taken into account during the LE -> NE transform.) */
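/* E.g. (illustrative), for unsigned char i, a = 3, b = 100, s = 16:
   (100 + 16 - 3) / 16 = 7 iterations (i = 3, 19, ..., 99), and the
   requirement that b + s stay below 256 clearly holds. */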
2714 step = iv0.step;
2715 tmp0 = lowpart_subreg (mode, iv0.base, comp_mode);
2716 tmp1 = lowpart_subreg (mode, iv1.base, comp_mode);
2718 bound = simplify_gen_binary (MINUS, mode, mode_mmax,
2719 lowpart_subreg (mode, step,
2720 comp_mode));
2721 if (step_is_pow2)
2723 rtx t0, t1;
2725 /* If s is a power of 2, we know that the loop is infinite if
2726 a % s <= b % s and b + s overflows. */
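/* E.g. (illustrative), for unsigned char i starting at a = 1, bound b = 250
   and s = 16: 1 % 16 <= 250 % 16 and 250 + 16 wraps, so i cycles through
   1, 17, ..., 241, 1, ... and never exceeds 250 -- the loop is infinite. */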
2727 assumption = simplify_gen_relational (reverse_condition (cond),
2728 SImode, mode,
2729 tmp1, bound);
2731 t0 = simplify_gen_binary (UMOD, mode, copy_rtx (tmp0), step);
2732 t1 = simplify_gen_binary (UMOD, mode, copy_rtx (tmp1), step);
2733 tmp = simplify_gen_relational (cond, SImode, mode, t0, t1);
2734 assumption = simplify_gen_binary (AND, SImode, assumption, tmp);
2735 desc->infinite =
2736 alloc_EXPR_LIST (0, assumption, desc->infinite);
2738 else
2740 assumption = simplify_gen_relational (cond, SImode, mode,
2741 tmp1, bound);
2742 desc->assumptions =
2743 alloc_EXPR_LIST (0, assumption, desc->assumptions);
2746 tmp = simplify_gen_binary (PLUS, comp_mode, iv1.base, iv0.step);
2747 tmp = lowpart_subreg (mode, tmp, comp_mode);
2748 assumption = simplify_gen_relational (reverse_condition (cond),
2749 SImode, mode, tmp0, tmp);
2751 delta = simplify_gen_binary (PLUS, mode, tmp1, step);
2752 delta = simplify_gen_binary (MINUS, mode, delta, tmp0);
2754 else
2756 /* Condition in the shape a <= b - s * i.
2757 We must know that a - s does not overflow and a - s <= b; then
2758 we can again compute the number of iterations as (b - (a - s)) / s. */
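/* E.g. (illustrative), for unsigned char j counting down from b = 100 by
   s = 16 while 20 <= j: (100 - (20 - 16)) / 16 = 6 iterations
   (j = 100, 84, 68, 52, 36, 20), and 20 - 16 does not wrap. The
   power-of-two branch below mirrors the one above, with a - s wrapping
   playing the role of b + s wrapping. */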
2759 step = simplify_gen_unary (NEG, mode, iv1.step, mode);
2760 tmp0 = lowpart_subreg (mode, iv0.base, comp_mode);
2761 tmp1 = lowpart_subreg (mode, iv1.base, comp_mode);
2763 bound = simplify_gen_binary (PLUS, mode, mode_mmin,
2764 lowpart_subreg (mode, step, comp_mode));
2765 if (step_is_pow2)
2767 rtx t0, t1;
2769 /* If s is a power of 2, we know that the loop is infinite if
2770 a % s <= b % s and a - s overflows. */
2771 assumption = simplify_gen_relational (reverse_condition (cond),
2772 SImode, mode,
2773 bound, tmp0);
2775 t0 = simplify_gen_binary (UMOD, mode, copy_rtx (tmp0), step);
2776 t1 = simplify_gen_binary (UMOD, mode, copy_rtx (tmp1), step);
2777 tmp = simplify_gen_relational (cond, SImode, mode, t0, t1);
2778 assumption = simplify_gen_binary (AND, SImode, assumption, tmp);
2779 desc->infinite =
2780 alloc_EXPR_LIST (0, assumption, desc->infinite);
2782 else
2784 assumption = simplify_gen_relational (cond, SImode, mode,
2785 bound, tmp0);
2786 desc->assumptions =
2787 alloc_EXPR_LIST (0, assumption, desc->assumptions);
2790 tmp = simplify_gen_binary (PLUS, comp_mode, iv0.base, iv1.step);
2791 tmp = lowpart_subreg (mode, tmp, comp_mode);
2792 assumption = simplify_gen_relational (reverse_condition (cond),
2793 SImode, mode,
2794 tmp, tmp1);
2795 delta = simplify_gen_binary (MINUS, mode, tmp0, step);
2796 delta = simplify_gen_binary (MINUS, mode, tmp1, delta);
2798 if (assumption == const_true_rtx)
2799 goto zero_iter_simplify;
2800 else if (assumption != const0_rtx)
2801 desc->noloop_assumptions =
2802 alloc_EXPR_LIST (0, assumption, desc->noloop_assumptions);
2803 delta = simplify_gen_binary (UDIV, mode, delta, step);
2804 desc->niter_expr = delta;
2807 old_niter = desc->niter_expr;
2809 simplify_using_initial_values (loop, AND, &desc->assumptions);
2810 if (desc->assumptions
2811 && XEXP (desc->assumptions, 0) == const0_rtx)
2812 goto fail;
2813 simplify_using_initial_values (loop, IOR, &desc->noloop_assumptions);
2814 simplify_using_initial_values (loop, IOR, &desc->infinite);
2815 simplify_using_initial_values (loop, UNKNOWN, &desc->niter_expr);
2817 /* Rerun the simplification. Consider code (created by copying loop headers)
2819 i = 0;
2821 if (0 < n)
2822 {
2823 do
2824 {
2825 i++;
2826 } while (i < n);
2827 }
2829 The first pass determines that i = 0, the second pass uses it to eliminate
2830 the noloop assumption. */
2832 simplify_using_initial_values (loop, AND, &desc->assumptions);
2833 if (desc->assumptions
2834 && XEXP (desc->assumptions, 0) == const0_rtx)
2835 goto fail;
2836 simplify_using_initial_values (loop, IOR, &desc->noloop_assumptions);
2837 simplify_using_initial_values (loop, IOR, &desc->infinite);
2838 simplify_using_initial_values (loop, UNKNOWN, &desc->niter_expr);
2840 if (desc->noloop_assumptions
2841 && XEXP (desc->noloop_assumptions, 0) == const_true_rtx)
2842 goto zero_iter;
2844 if (CONST_INT_P (desc->niter_expr))
2846 uint64_t val = INTVAL (desc->niter_expr);
2848 desc->const_iter = true;
2849 desc->niter = val & GET_MODE_MASK (desc->mode);
2850 if (!desc->infinite
2851 && !desc->assumptions)
2852 record_niter_bound (loop, desc->niter, false, true);
2854 else
2856 max = determine_max_iter (loop, desc, old_niter);
2857 if (!max)
2858 goto zero_iter_simplify;
2859 if (!desc->infinite
2860 && !desc->assumptions)
2861 record_niter_bound (loop, max, false, true);
2863 /* simplify_using_initial_values does a copy propagation on the registers
2864 in the expression for the number of iterations. This prolongs the live
2865 ranges of registers and increases register pressure, and usually
2866 brings no gain (and if it happens to, the cse pass will take care
2867 of it anyway). So prevent this behavior, unless it enabled us to
2868 derive that the number of iterations is a constant. */
2869 desc->niter_expr = old_niter;
2872 return;
2874 zero_iter_simplify:
2875 /* Simplify the assumptions. */
2876 simplify_using_initial_values (loop, AND, &desc->assumptions);
2877 if (desc->assumptions
2878 && XEXP (desc->assumptions, 0) == const0_rtx)
2879 goto fail;
2880 simplify_using_initial_values (loop, IOR, &desc->infinite);
2882 /* Fallthru. */
2883 zero_iter:
2884 desc->const_iter = true;
2885 desc->niter = 0;
2886 record_niter_bound (loop, 0, true, true);
2887 desc->noloop_assumptions = NULL_RTX;
2888 desc->niter_expr = const0_rtx;
2889 return;
2891 fail:
2892 desc->simple_p = false;
2893 return;
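/* The following is an illustrative sketch only (it is not called anywhere and
   is not part of the analysis above): it redoes the arithmetic of the NE case
   of iv_number_of_iterations for a fixed 8-bit mode in plain C, computing the
   number of iterations of "for (x = 0; x != c; x += s)" with 8-bit
   wraparound, or (uint64_t) -1 if the loop never terminates. S is assumed
   to be nonzero modulo 256. */

static uint64_t
demo_ne_niter_8bit (uint64_t s, uint64_t c)
{
  unsigned size = 8;
  uint64_t d = 1, inv = 1, mask;

  s &= 0xff;
  c &= 0xff;

  /* Split the step into an odd part S and a power of two D.  */
  while (s % 2 == 0)
    {
      s /= 2;
      d *= 2;
      size--;
    }

  /* If D does not divide C, no multiple of the step ever equals C.  */
  if (c % d != 0)
    return (uint64_t) -1;

  /* Invert the odd part modulo 2^64 by Newton iteration (each step doubles
     the number of correct low bits), then reduce modulo 2^SIZE.  */
  for (int i = 0; i < 6; i++)
    inv *= 2 - inv * s;
  mask = ((uint64_t) 1 << size) - 1;

  return (inv * (c / d)) & mask;
}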
2896 /* Checks whether E is a simple exit from LOOP and stores its description
2897 into DESC. */
2899 static void
2900 check_simple_exit (struct loop *loop, edge e, struct niter_desc *desc)
2902 basic_block exit_bb;
2903 rtx condition;
2904 rtx_insn *at;
2905 edge ein;
2907 exit_bb = e->src;
2908 desc->simple_p = false;
2910 /* It must belong directly to the loop. */
2911 if (exit_bb->loop_father != loop)
2912 return;
2914 /* It must be tested (at least) once during any iteration. */
2915 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit_bb))
2916 return;
2918 /* It must end in a simple conditional jump. */
2919 if (!any_condjump_p (BB_END (exit_bb)))
2920 return;
2922 ein = EDGE_SUCC (exit_bb, 0);
2923 if (ein == e)
2924 ein = EDGE_SUCC (exit_bb, 1);
2926 desc->out_edge = e;
2927 desc->in_edge = ein;
2929 /* Test whether the condition is suitable. */
2930 if (!(condition = get_condition (BB_END (ein->src), &at, false, false)))
2931 return;
2933 if (ein->flags & EDGE_FALLTHRU)
2935 condition = reversed_condition (condition);
2936 if (!condition)
2937 return;
2940 /* Check that we are able to determine number of iterations and fill
2941 in information about it. */
2942 iv_number_of_iterations (loop, at, condition, desc);
2945 /* Finds a simple exit of LOOP and stores its description into DESC. */
2947 void
2948 find_simple_exit (struct loop *loop, struct niter_desc *desc)
2950 unsigned i;
2951 basic_block *body;
2952 edge e;
2953 struct niter_desc act;
2954 bool any = false;
2955 edge_iterator ei;
2957 desc->simple_p = false;
2958 body = get_loop_body (loop);
2960 for (i = 0; i < loop->num_nodes; i++)
2962 FOR_EACH_EDGE (e, ei, body[i]->succs)
2964 if (flow_bb_inside_loop_p (loop, e->dest))
2965 continue;
2967 check_simple_exit (loop, e, &act);
2968 if (!act.simple_p)
2969 continue;
2971 if (!any)
2972 any = true;
2973 else
2975 /* Prefer constant iterations; the fewer the better. */
2976 if (!act.const_iter
2977 || (desc->const_iter && act.niter >= desc->niter))
2978 continue;
2980 /* Also, if the current exit may be infinite while the old one
2981 may not, prefer the old one. */
2982 if (act.infinite && !desc->infinite)
2983 continue;
2986 *desc = act;
2990 if (dump_file)
2992 if (desc->simple_p)
2994 fprintf (dump_file, "Loop %d is simple:\n", loop->num);
2995 fprintf (dump_file, " simple exit %d -> %d\n",
2996 desc->out_edge->src->index,
2997 desc->out_edge->dest->index);
2998 if (desc->assumptions)
3000 fprintf (dump_file, " assumptions: ");
3001 print_rtl (dump_file, desc->assumptions);
3002 fprintf (dump_file, "\n");
3004 if (desc->noloop_assumptions)
3006 fprintf (dump_file, " does not roll if: ");
3007 print_rtl (dump_file, desc->noloop_assumptions);
3008 fprintf (dump_file, "\n");
3010 if (desc->infinite)
3012 fprintf (dump_file, " infinite if: ");
3013 print_rtl (dump_file, desc->infinite);
3014 fprintf (dump_file, "\n");
3017 fprintf (dump_file, " number of iterations: ");
3018 print_rtl (dump_file, desc->niter_expr);
3019 fprintf (dump_file, "\n");
3021 fprintf (dump_file, " upper bound: %li\n",
3022 (long)get_max_loop_iterations_int (loop));
3023 fprintf (dump_file, " realistic bound: %li\n",
3024 (long)get_estimated_loop_iterations_int (loop));
3026 else
3027 fprintf (dump_file, "Loop %d is not simple.\n", loop->num);
3030 free (body);
3033 /* Creates a simple loop description of LOOP if it was not computed
3034 already. */
3036 struct niter_desc *
3037 get_simple_loop_desc (struct loop *loop)
3039 struct niter_desc *desc = simple_loop_desc (loop);
3041 if (desc)
3042 return desc;
3044 /* At least desc->infinite is not always initialized by
3045 find_simple_exit. */
3046 desc = ggc_cleared_alloc<niter_desc> ();
3047 iv_analysis_loop_init (loop);
3048 find_simple_exit (loop, desc);
3049 loop->simple_loop_desc = desc;
3051 if (desc->simple_p && (desc->assumptions || desc->infinite))
3053 const char *wording;
3055 /* Assume that no overflow happens and that the loop is finite.
3056 We already warned at the tree level if we ran optimizations there. */
3057 if (!flag_tree_loop_optimize && warn_unsafe_loop_optimizations)
3059 if (desc->infinite)
3061 wording =
3062 flag_unsafe_loop_optimizations
3063 ? N_("assuming that the loop is not infinite")
3064 : N_("cannot optimize possibly infinite loops");
3065 warning (OPT_Wunsafe_loop_optimizations, "%s",
3066 gettext (wording));
3068 if (desc->assumptions)
3070 wording =
3071 flag_unsafe_loop_optimizations
3072 ? N_("assuming that the loop counter does not overflow")
3073 : N_("cannot optimize loop, the loop counter may overflow");
3074 warning (OPT_Wunsafe_loop_optimizations, "%s",
3075 gettext (wording));
3079 if (flag_unsafe_loop_optimizations)
3081 desc->assumptions = NULL_RTX;
3082 desc->infinite = NULL_RTX;
3086 return desc;
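/* Typical usage (an illustrative sketch only; choose_unroll_factor is a
   made-up placeholder, not a GCC function):

     struct niter_desc *desc = get_simple_loop_desc (loop);
     if (desc->simple_p && desc->const_iter)
       factor = choose_unroll_factor (desc->niter);

   The description is cached on the loop, so repeated calls are cheap;
   free_simple_loop_desc releases it, and iv_analysis_done cleans up the
   underlying iv analysis. */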
3089 /* Releases simple loop description for LOOP. */
3091 void
3092 free_simple_loop_desc (struct loop *loop)
3094 struct niter_desc *desc = simple_loop_desc (loop);
3096 if (!desc)
3097 return;
3099 ggc_free (desc);
3100 loop->simple_loop_desc = NULL;