gcc/loop-iv.c
1 /* Rtl-level induction variable analysis.
2 Copyright (C) 2004-2015 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published by the
8 Free Software Foundation; either version 3, or (at your option) any
9 later version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT
12 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* This is a simple analysis of induction variables of the loop. The major use
21 is for determining the number of iterations of a loop for loop unrolling,
22 doloop optimization and branch prediction. The iv information is computed
23 on demand.
25 Induction variables are analyzed by walking the use-def chains. When
26 a basic induction variable (biv) is found, it is cached in the bivs
27 hash table. When a register is proved to be a biv, its description
28 is stored to DF_REF_DATA of the def reference.
30 The analysis always works with one loop -- you must call
31 iv_analysis_loop_init (loop) for it. All the other functions then work with
32 this loop. When you need to work with another loop, just call
33 iv_analysis_loop_init for it. When you no longer need iv analysis, call
34 iv_analysis_done () to clean up the memory.
36 The available functions are:
38 iv_analyze (insn, reg, iv): Stores the description of the induction variable
39 corresponding to the use of register REG in INSN to IV. Returns true if
40 REG is an induction variable in INSN, false otherwise.
41 If the use of REG is not found in INSN, the following insns are scanned (so that
42 we may call this function on the insn returned by get_condition).
43 iv_analyze_result (insn, def, iv): Stores to IV the description of the iv
44 corresponding to DEF, which is a register defined in INSN.
45 iv_analyze_expr (insn, rhs, mode, iv): Stores to IV the description of iv
46 corresponding to expression EXPR evaluated at INSN. All registers used by
47 EXPR must also be used in INSN.
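/* A minimal usage sketch of the interface above.  This is not taken from
   GCC code; LOOP, INSN and REG stand for values the caller already has
   (e.g. the insn returned by get_condition and a register used in it):

     struct rtx_iv iv;

     iv_analysis_loop_init (loop);
     if (iv_analyze (insn, reg, &iv))
       {
         rtx fifth = get_iv_value (&iv, GEN_INT (5));
         ...
       }
     iv_analysis_done ();

   Note that get_iv_value asserts !iv.first_special, so a real caller checks
   that (and usually iv.step) before asking for a value.  */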
50 #include "config.h"
51 #include "system.h"
52 #include "coretypes.h"
53 #include "backend.h"
54 #include "tree.h"
55 #include "rtl.h"
56 #include "df.h"
57 #include "cfgloop.h"
58 #include "flags.h"
59 #include "alias.h"
60 #include "insn-config.h"
61 #include "expmed.h"
62 #include "dojump.h"
63 #include "explow.h"
64 #include "calls.h"
65 #include "emit-rtl.h"
66 #include "varasm.h"
67 #include "stmt.h"
68 #include "expr.h"
69 #include "intl.h"
70 #include "diagnostic-core.h"
71 #include "dumpfile.h"
72 #include "rtl-iter.h"
74 /* Possible return values of iv_get_reaching_def. */
76 enum iv_grd_result
78 /* More than one reaching def, or reaching def that does not
79 dominate the use. */
80 GRD_INVALID,
82 /* The use is a trivial invariant of the loop, i.e. it is not changed
83 inside the loop. */
84 GRD_INVARIANT,
86 /* The use is reached by initial value and a value from the
87 previous iteration. */
88 GRD_MAYBE_BIV,
90 /* The use has a single dominating def. */
91 GRD_SINGLE_DOM
94 /* Information about a biv. */
96 struct biv_entry
98 unsigned regno; /* The register of the biv. */
99 struct rtx_iv iv; /* Value of the biv. */
102 static bool clean_slate = true;
104 static unsigned int iv_ref_table_size = 0;
106 /* Table of rtx_ivs indexed by the df_ref uid field. */
107 static struct rtx_iv ** iv_ref_table;
109 /* Induction variable stored at the reference. */
110 #define DF_REF_IV(REF) iv_ref_table[DF_REF_ID (REF)]
111 #define DF_REF_IV_SET(REF, IV) iv_ref_table[DF_REF_ID (REF)] = (IV)
113 /* The current loop. */
115 static struct loop *current_loop;
117 /* Hashtable helper. */
119 struct biv_entry_hasher : free_ptr_hash <biv_entry>
121 typedef rtx_def *compare_type;
122 static inline hashval_t hash (const biv_entry *);
123 static inline bool equal (const biv_entry *, const rtx_def *);
126 /* Returns hash value for biv B. */
128 inline hashval_t
129 biv_entry_hasher::hash (const biv_entry *b)
131 return b->regno;
134 /* Compares biv B and register R. */
136 inline bool
137 biv_entry_hasher::equal (const biv_entry *b, const rtx_def *r)
139 return b->regno == REGNO (r);
142 /* Bivs of the current loop. */
144 static hash_table<biv_entry_hasher> *bivs;
146 static bool iv_analyze_op (rtx_insn *, rtx, struct rtx_iv *);
148 /* Return the RTX code corresponding to the IV extend code EXTEND. */
149 static inline enum rtx_code
150 iv_extend_to_rtx_code (enum iv_extend_code extend)
152 switch (extend)
154 case IV_SIGN_EXTEND:
155 return SIGN_EXTEND;
156 case IV_ZERO_EXTEND:
157 return ZERO_EXTEND;
158 case IV_UNKNOWN_EXTEND:
159 return UNKNOWN;
161 gcc_unreachable ();
164 /* Dumps information about IV to FILE. */
166 extern void dump_iv_info (FILE *, struct rtx_iv *);
167 void
168 dump_iv_info (FILE *file, struct rtx_iv *iv)
170 if (!iv->base)
172 fprintf (file, "not simple");
173 return;
176 if (iv->step == const0_rtx
177 && !iv->first_special)
178 fprintf (file, "invariant ");
180 print_rtl (file, iv->base);
181 if (iv->step != const0_rtx)
183 fprintf (file, " + ");
184 print_rtl (file, iv->step);
185 fprintf (file, " * iteration");
187 fprintf (file, " (in %s)", GET_MODE_NAME (iv->mode));
189 if (iv->mode != iv->extend_mode)
190 fprintf (file, " %s to %s",
191 rtx_name[iv_extend_to_rtx_code (iv->extend)],
192 GET_MODE_NAME (iv->extend_mode));
194 if (iv->mult != const1_rtx)
196 fprintf (file, " * ");
197 print_rtl (file, iv->mult);
199 if (iv->delta != const0_rtx)
201 fprintf (file, " + ");
202 print_rtl (file, iv->delta);
204 if (iv->first_special)
205 fprintf (file, " (first special)");
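/* Illustration (not taken from a real dump): the value described by a
   struct rtx_iv at iteration i is

     delta + mult * EXTEND_{extend_mode} (SUBREG_{mode} (base + i * step))

   so output roughly of the form

     (reg:SI 100) + (const_int 4) * iteration (in HImode) zero_extend to SImode

   stands for zero_extend:SI (subreg:HI (r100 + 4 * i)); delta = 0 and
   mult = 1 are simply not printed.  */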
208 static void
209 check_iv_ref_table_size (void)
211 if (iv_ref_table_size < DF_DEFS_TABLE_SIZE ())
213 unsigned int new_size = DF_DEFS_TABLE_SIZE () + (DF_DEFS_TABLE_SIZE () / 4);
214 iv_ref_table = XRESIZEVEC (struct rtx_iv *, iv_ref_table, new_size);
215 memset (&iv_ref_table[iv_ref_table_size], 0,
216 (new_size - iv_ref_table_size) * sizeof (struct rtx_iv *));
217 iv_ref_table_size = new_size;
222 /* Checks whether REG is a well-behaved register. */
224 static bool
225 simple_reg_p (rtx reg)
227 unsigned r;
229 if (GET_CODE (reg) == SUBREG)
231 if (!subreg_lowpart_p (reg))
232 return false;
233 reg = SUBREG_REG (reg);
236 if (!REG_P (reg))
237 return false;
239 r = REGNO (reg);
240 if (HARD_REGISTER_NUM_P (r))
241 return false;
243 if (GET_MODE_CLASS (GET_MODE (reg)) != MODE_INT)
244 return false;
246 return true;
249 /* Clears the information about ivs stored in df. */
251 static void
252 clear_iv_info (void)
254 unsigned i, n_defs = DF_DEFS_TABLE_SIZE ();
255 struct rtx_iv *iv;
257 check_iv_ref_table_size ();
258 for (i = 0; i < n_defs; i++)
260 iv = iv_ref_table[i];
261 if (iv)
263 free (iv);
264 iv_ref_table[i] = NULL;
268 bivs->empty ();
272 /* Prepare the data for an induction variable analysis of a LOOP. */
274 void
275 iv_analysis_loop_init (struct loop *loop)
277 current_loop = loop;
279 /* Clear the information from the analysis of the previous loop. */
280 if (clean_slate)
282 df_set_flags (DF_EQ_NOTES + DF_DEFER_INSN_RESCAN);
283 bivs = new hash_table<biv_entry_hasher> (10);
284 clean_slate = false;
286 else
287 clear_iv_info ();
289 /* Get rid of the ud chains before processing the rescans. Then add
290 the problem back. */
291 df_remove_problem (df_chain);
292 df_process_deferred_rescans ();
293 df_set_flags (DF_RD_PRUNE_DEAD_DEFS);
294 df_chain_add_problem (DF_UD_CHAIN);
295 df_note_add_problem ();
296 df_analyze_loop (loop);
297 if (dump_file)
298 df_dump_region (dump_file);
300 check_iv_ref_table_size ();
303 /* Finds the definition of REG that dominates the loop latch and stores
304 it to DEF. Returns false if there is not a single definition
305 dominating the latch. If REG has no definition in the loop, DEF
306 is set to NULL and true is returned. */
308 static bool
309 latch_dominating_def (rtx reg, df_ref *def)
311 df_ref single_rd = NULL, adef;
312 unsigned regno = REGNO (reg);
313 struct df_rd_bb_info *bb_info = DF_RD_BB_INFO (current_loop->latch);
315 for (adef = DF_REG_DEF_CHAIN (regno); adef; adef = DF_REF_NEXT_REG (adef))
317 if (!bitmap_bit_p (df->blocks_to_analyze, DF_REF_BBNO (adef))
318 || !bitmap_bit_p (&bb_info->out, DF_REF_ID (adef)))
319 continue;
321 /* More than one reaching definition. */
322 if (single_rd)
323 return false;
325 if (!just_once_each_iteration_p (current_loop, DF_REF_BB (adef)))
326 return false;
328 single_rd = adef;
331 *def = single_rd;
332 return true;
335 /* Gets definition of REG reaching its use in INSN and stores it to DEF. */
337 static enum iv_grd_result
338 iv_get_reaching_def (rtx_insn *insn, rtx reg, df_ref *def)
340 df_ref use, adef;
341 basic_block def_bb, use_bb;
342 rtx_insn *def_insn;
343 bool dom_p;
345 *def = NULL;
346 if (!simple_reg_p (reg))
347 return GRD_INVALID;
348 if (GET_CODE (reg) == SUBREG)
349 reg = SUBREG_REG (reg);
350 gcc_assert (REG_P (reg));
352 use = df_find_use (insn, reg);
353 gcc_assert (use != NULL);
355 if (!DF_REF_CHAIN (use))
356 return GRD_INVARIANT;
358 /* More than one reaching def. */
359 if (DF_REF_CHAIN (use)->next)
360 return GRD_INVALID;
362 adef = DF_REF_CHAIN (use)->ref;
364 /* We do not handle setting only part of the register. */
365 if (DF_REF_FLAGS (adef) & DF_REF_READ_WRITE)
366 return GRD_INVALID;
368 def_insn = DF_REF_INSN (adef);
369 def_bb = DF_REF_BB (adef);
370 use_bb = BLOCK_FOR_INSN (insn);
372 if (use_bb == def_bb)
373 dom_p = (DF_INSN_LUID (def_insn) < DF_INSN_LUID (insn));
374 else
375 dom_p = dominated_by_p (CDI_DOMINATORS, use_bb, def_bb);
377 if (dom_p)
379 *def = adef;
380 return GRD_SINGLE_DOM;
383 /* The definition does not dominate the use. This is still OK if
384 this may be a use of a biv, i.e. if the def_bb dominates loop
385 latch. */
386 if (just_once_each_iteration_p (current_loop, def_bb))
387 return GRD_MAYBE_BIV;
389 return GRD_INVALID;
392 /* Sets IV to invariant CST in MODE. Always returns true (just for
393 consistency with other iv manipulation functions that may fail). */
395 static bool
396 iv_constant (struct rtx_iv *iv, rtx cst, machine_mode mode)
398 if (mode == VOIDmode)
399 mode = GET_MODE (cst);
401 iv->mode = mode;
402 iv->base = cst;
403 iv->step = const0_rtx;
404 iv->first_special = false;
405 iv->extend = IV_UNKNOWN_EXTEND;
406 iv->extend_mode = iv->mode;
407 iv->delta = const0_rtx;
408 iv->mult = const1_rtx;
410 return true;
413 /* Evaluates application of subreg to MODE on IV. */
415 static bool
416 iv_subreg (struct rtx_iv *iv, machine_mode mode)
418 /* If iv is invariant, just calculate the new value. */
419 if (iv->step == const0_rtx
420 && !iv->first_special)
422 rtx val = get_iv_value (iv, const0_rtx);
423 val = lowpart_subreg (mode, val,
424 iv->extend == IV_UNKNOWN_EXTEND
425 ? iv->mode : iv->extend_mode);
427 iv->base = val;
428 iv->extend = IV_UNKNOWN_EXTEND;
429 iv->mode = iv->extend_mode = mode;
430 iv->delta = const0_rtx;
431 iv->mult = const1_rtx;
432 return true;
435 if (iv->extend_mode == mode)
436 return true;
438 if (GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (iv->mode))
439 return false;
441 iv->extend = IV_UNKNOWN_EXTEND;
442 iv->mode = mode;
444 iv->base = simplify_gen_binary (PLUS, iv->extend_mode, iv->delta,
445 simplify_gen_binary (MULT, iv->extend_mode,
446 iv->base, iv->mult));
447 iv->step = simplify_gen_binary (MULT, iv->extend_mode, iv->step, iv->mult);
448 iv->mult = const1_rtx;
449 iv->delta = const0_rtx;
450 iv->first_special = false;
452 return true;
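/* Example (hypothetical, not from a GCC dump): for an iv iterating in DImode
   as base + i * step with no extension yet, iv_subreg with SImode yields
   mode = SImode, extend_mode = DImode and an unknown extend, i.e. the value
   subreg:SI (base + i * step); delta and mult are folded into base and step,
   so they become const0_rtx and const1_rtx again.  */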
455 /* Evaluates application of EXTEND to MODE on IV. */
457 static bool
458 iv_extend (struct rtx_iv *iv, enum iv_extend_code extend, machine_mode mode)
460 /* If iv is invariant, just calculate the new value. */
461 if (iv->step == const0_rtx
462 && !iv->first_special)
464 rtx val = get_iv_value (iv, const0_rtx);
465 if (iv->extend_mode != iv->mode
466 && iv->extend != IV_UNKNOWN_EXTEND
467 && iv->extend != extend)
468 val = lowpart_subreg (iv->mode, val, iv->extend_mode);
469 val = simplify_gen_unary (iv_extend_to_rtx_code (extend), mode,
470 val,
471 iv->extend == extend
472 ? iv->extend_mode : iv->mode);
473 iv->base = val;
474 iv->extend = IV_UNKNOWN_EXTEND;
475 iv->mode = iv->extend_mode = mode;
476 iv->delta = const0_rtx;
477 iv->mult = const1_rtx;
478 return true;
481 if (mode != iv->extend_mode)
482 return false;
484 if (iv->extend != IV_UNKNOWN_EXTEND
485 && iv->extend != extend)
486 return false;
488 iv->extend = extend;
490 return true;
493 /* Evaluates negation of IV. */
495 static bool
496 iv_neg (struct rtx_iv *iv)
498 if (iv->extend == IV_UNKNOWN_EXTEND)
500 iv->base = simplify_gen_unary (NEG, iv->extend_mode,
501 iv->base, iv->extend_mode);
502 iv->step = simplify_gen_unary (NEG, iv->extend_mode,
503 iv->step, iv->extend_mode);
505 else
507 iv->delta = simplify_gen_unary (NEG, iv->extend_mode,
508 iv->delta, iv->extend_mode);
509 iv->mult = simplify_gen_unary (NEG, iv->extend_mode,
510 iv->mult, iv->extend_mode);
513 return true;
516 /* Evaluates addition or subtraction (according to OP) of IV1 to IV0. */
518 static bool
519 iv_add (struct rtx_iv *iv0, struct rtx_iv *iv1, enum rtx_code op)
521 machine_mode mode;
522 rtx arg;
524 /* Extend the constant to extend_mode of the other operand if necessary. */
525 if (iv0->extend == IV_UNKNOWN_EXTEND
526 && iv0->mode == iv0->extend_mode
527 && iv0->step == const0_rtx
528 && GET_MODE_SIZE (iv0->extend_mode) < GET_MODE_SIZE (iv1->extend_mode))
530 iv0->extend_mode = iv1->extend_mode;
531 iv0->base = simplify_gen_unary (ZERO_EXTEND, iv0->extend_mode,
532 iv0->base, iv0->mode);
534 if (iv1->extend == IV_UNKNOWN_EXTEND
535 && iv1->mode == iv1->extend_mode
536 && iv1->step == const0_rtx
537 && GET_MODE_SIZE (iv1->extend_mode) < GET_MODE_SIZE (iv0->extend_mode))
539 iv1->extend_mode = iv0->extend_mode;
540 iv1->base = simplify_gen_unary (ZERO_EXTEND, iv1->extend_mode,
541 iv1->base, iv1->mode);
544 mode = iv0->extend_mode;
545 if (mode != iv1->extend_mode)
546 return false;
548 if (iv0->extend == IV_UNKNOWN_EXTEND
549 && iv1->extend == IV_UNKNOWN_EXTEND)
551 if (iv0->mode != iv1->mode)
552 return false;
554 iv0->base = simplify_gen_binary (op, mode, iv0->base, iv1->base);
555 iv0->step = simplify_gen_binary (op, mode, iv0->step, iv1->step);
557 return true;
560 /* Handle addition of constant. */
561 if (iv1->extend == IV_UNKNOWN_EXTEND
562 && iv1->mode == mode
563 && iv1->step == const0_rtx)
565 iv0->delta = simplify_gen_binary (op, mode, iv0->delta, iv1->base);
566 return true;
569 if (iv0->extend == IV_UNKNOWN_EXTEND
570 && iv0->mode == mode
571 && iv0->step == const0_rtx)
573 arg = iv0->base;
574 *iv0 = *iv1;
575 if (op == MINUS
576 && !iv_neg (iv0))
577 return false;
579 iv0->delta = simplify_gen_binary (PLUS, mode, iv0->delta, arg);
580 return true;
583 return false;
586 /* Evaluates multiplication of IV by constant MBY. */
588 static bool
589 iv_mult (struct rtx_iv *iv, rtx mby)
591 machine_mode mode = iv->extend_mode;
593 if (GET_MODE (mby) != VOIDmode
594 && GET_MODE (mby) != mode)
595 return false;
597 if (iv->extend == IV_UNKNOWN_EXTEND)
599 iv->base = simplify_gen_binary (MULT, mode, iv->base, mby);
600 iv->step = simplify_gen_binary (MULT, mode, iv->step, mby);
602 else
604 iv->delta = simplify_gen_binary (MULT, mode, iv->delta, mby);
605 iv->mult = simplify_gen_binary (MULT, mode, iv->mult, mby);
608 return true;
611 /* Evaluates shift of IV by constant MBY. */
613 static bool
614 iv_shift (struct rtx_iv *iv, rtx mby)
616 machine_mode mode = iv->extend_mode;
618 if (GET_MODE (mby) != VOIDmode
619 && GET_MODE (mby) != mode)
620 return false;
622 if (iv->extend == IV_UNKNOWN_EXTEND)
624 iv->base = simplify_gen_binary (ASHIFT, mode, iv->base, mby);
625 iv->step = simplify_gen_binary (ASHIFT, mode, iv->step, mby);
627 else
629 iv->delta = simplify_gen_binary (ASHIFT, mode, iv->delta, mby);
630 iv->mult = simplify_gen_binary (ASHIFT, mode, iv->mult, mby);
633 return true;
636 /* The recursive part of get_biv_step. Computes the value defined by DEF
637 with respect to the initial value of REG inside the loop, in the shape described
638 at get_biv_step. */
640 static bool
641 get_biv_step_1 (df_ref def, rtx reg,
642 rtx *inner_step, machine_mode *inner_mode,
643 enum iv_extend_code *extend, machine_mode outer_mode,
644 rtx *outer_step)
646 rtx set, rhs, op0 = NULL_RTX, op1 = NULL_RTX;
647 rtx next, nextr;
648 enum rtx_code code;
649 rtx_insn *insn = DF_REF_INSN (def);
650 df_ref next_def;
651 enum iv_grd_result res;
653 set = single_set (insn);
654 if (!set)
655 return false;
657 rhs = find_reg_equal_equiv_note (insn);
658 if (rhs)
659 rhs = XEXP (rhs, 0);
660 else
661 rhs = SET_SRC (set);
663 code = GET_CODE (rhs);
664 switch (code)
666 case SUBREG:
667 case REG:
668 next = rhs;
669 break;
671 case PLUS:
672 case MINUS:
673 op0 = XEXP (rhs, 0);
674 op1 = XEXP (rhs, 1);
676 if (code == PLUS && CONSTANT_P (op0))
677 std::swap (op0, op1);
679 if (!simple_reg_p (op0)
680 || !CONSTANT_P (op1))
681 return false;
683 if (GET_MODE (rhs) != outer_mode)
685 /* ppc64 uses expressions like
687 (set x:SI (plus:SI (subreg:SI y:DI) 1)).
689 this is equivalent to
691 (set x':DI (plus:DI y:DI 1))
692 (set x:SI (subreg:SI x':DI)). */
693 if (GET_CODE (op0) != SUBREG)
694 return false;
695 if (GET_MODE (SUBREG_REG (op0)) != outer_mode)
696 return false;
699 next = op0;
700 break;
702 case SIGN_EXTEND:
703 case ZERO_EXTEND:
704 if (GET_MODE (rhs) != outer_mode)
705 return false;
707 op0 = XEXP (rhs, 0);
708 if (!simple_reg_p (op0))
709 return false;
711 next = op0;
712 break;
714 default:
715 return false;
718 if (GET_CODE (next) == SUBREG)
720 if (!subreg_lowpart_p (next))
721 return false;
723 nextr = SUBREG_REG (next);
724 if (GET_MODE (nextr) != outer_mode)
725 return false;
727 else
728 nextr = next;
730 res = iv_get_reaching_def (insn, nextr, &next_def);
732 if (res == GRD_INVALID || res == GRD_INVARIANT)
733 return false;
735 if (res == GRD_MAYBE_BIV)
737 if (!rtx_equal_p (nextr, reg))
738 return false;
740 *inner_step = const0_rtx;
741 *extend = IV_UNKNOWN_EXTEND;
742 *inner_mode = outer_mode;
743 *outer_step = const0_rtx;
745 else if (!get_biv_step_1 (next_def, reg,
746 inner_step, inner_mode, extend, outer_mode,
747 outer_step))
748 return false;
750 if (GET_CODE (next) == SUBREG)
752 machine_mode amode = GET_MODE (next);
754 if (GET_MODE_SIZE (amode) > GET_MODE_SIZE (*inner_mode))
755 return false;
757 *inner_mode = amode;
758 *inner_step = simplify_gen_binary (PLUS, outer_mode,
759 *inner_step, *outer_step);
760 *outer_step = const0_rtx;
761 *extend = IV_UNKNOWN_EXTEND;
764 switch (code)
766 case REG:
767 case SUBREG:
768 break;
770 case PLUS:
771 case MINUS:
772 if (*inner_mode == outer_mode
773 /* See comment in previous switch. */
774 || GET_MODE (rhs) != outer_mode)
775 *inner_step = simplify_gen_binary (code, outer_mode,
776 *inner_step, op1);
777 else
778 *outer_step = simplify_gen_binary (code, outer_mode,
779 *outer_step, op1);
780 break;
782 case SIGN_EXTEND:
783 case ZERO_EXTEND:
784 gcc_assert (GET_MODE (op0) == *inner_mode
785 && *extend == IV_UNKNOWN_EXTEND
786 && *outer_step == const0_rtx);
788 *extend = (code == SIGN_EXTEND) ? IV_SIGN_EXTEND : IV_ZERO_EXTEND;
789 break;
791 default:
792 return false;
795 return true;
798 /* Gets the operation on register REG inside loop, in shape
800 OUTER_STEP + EXTEND_{OUTER_MODE} (SUBREG_{INNER_MODE} (REG + INNER_STEP))
802 If the operation cannot be described in this shape, return false.
803 LAST_DEF is the definition of REG that dominates loop latch. */
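/* For instance (an illustrative case, not lifted from a dump): a biv updated
   only by (set (reg:SI i) (plus:SI (reg:SI i) (const_int 4))) comes out as
   INNER_STEP = 4, INNER_MODE = OUTER_MODE = SImode, EXTEND = IV_UNKNOWN_EXTEND
   and OUTER_STEP = 0; an update chain that goes through a narrowing subreg
   and a sign/zero extension fills in EXTEND (and possibly OUTER_STEP)
   instead.  */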
805 static bool
806 get_biv_step (df_ref last_def, rtx reg, rtx *inner_step,
807 machine_mode *inner_mode, enum iv_extend_code *extend,
808 machine_mode *outer_mode, rtx *outer_step)
810 *outer_mode = GET_MODE (reg);
812 if (!get_biv_step_1 (last_def, reg,
813 inner_step, inner_mode, extend, *outer_mode,
814 outer_step))
815 return false;
817 gcc_assert ((*inner_mode == *outer_mode) != (*extend != IV_UNKNOWN_EXTEND));
818 gcc_assert (*inner_mode != *outer_mode || *outer_step == const0_rtx);
820 return true;
823 /* Records information that DEF is induction variable IV. */
825 static void
826 record_iv (df_ref def, struct rtx_iv *iv)
828 struct rtx_iv *recorded_iv = XNEW (struct rtx_iv);
830 *recorded_iv = *iv;
831 check_iv_ref_table_size ();
832 DF_REF_IV_SET (def, recorded_iv);
835 /* If DEF was already analyzed for bivness, store the description of the biv to
836 IV and return true. Otherwise return false. */
838 static bool
839 analyzed_for_bivness_p (rtx def, struct rtx_iv *iv)
841 struct biv_entry *biv = bivs->find_with_hash (def, REGNO (def));
843 if (!biv)
844 return false;
846 *iv = biv->iv;
847 return true;
850 static void
851 record_biv (rtx def, struct rtx_iv *iv)
853 struct biv_entry *biv = XNEW (struct biv_entry);
854 biv_entry **slot = bivs->find_slot_with_hash (def, REGNO (def), INSERT);
856 biv->regno = REGNO (def);
857 biv->iv = *iv;
858 gcc_assert (!*slot);
859 *slot = biv;
862 /* Determines whether DEF is a biv and if so, stores its description
863 to *IV. */
865 static bool
866 iv_analyze_biv (rtx def, struct rtx_iv *iv)
868 rtx inner_step, outer_step;
869 machine_mode inner_mode, outer_mode;
870 enum iv_extend_code extend;
871 df_ref last_def;
873 if (dump_file)
875 fprintf (dump_file, "Analyzing ");
876 print_rtl (dump_file, def);
877 fprintf (dump_file, " for bivness.\n");
880 if (!REG_P (def))
882 if (!CONSTANT_P (def))
883 return false;
885 return iv_constant (iv, def, VOIDmode);
888 if (!latch_dominating_def (def, &last_def))
890 if (dump_file)
891 fprintf (dump_file, " not simple.\n");
892 return false;
895 if (!last_def)
896 return iv_constant (iv, def, VOIDmode);
898 if (analyzed_for_bivness_p (def, iv))
900 if (dump_file)
901 fprintf (dump_file, " already analysed.\n");
902 return iv->base != NULL_RTX;
905 if (!get_biv_step (last_def, def, &inner_step, &inner_mode, &extend,
906 &outer_mode, &outer_step))
908 iv->base = NULL_RTX;
909 goto end;
912 /* Loop transforms base to es (base + inner_step) + outer_step,
913 where es means extend of subreg between inner_mode and outer_mode.
914 The corresponding induction variable is
916 es ((base - outer_step) + i * (inner_step + outer_step)) + outer_step */
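/* For example (hypothetical): for a biv updated only by i += 4 we have
   inner_step = 4 and outer_step = 0, so the code below sets iv->base to DEF
   itself (the value of i on entry to the loop), iv->step to 4, delta to 0
   and first_special to false; the formula only becomes interesting when
   inner_mode differs from outer_mode.  */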
918 iv->base = simplify_gen_binary (MINUS, outer_mode, def, outer_step);
919 iv->step = simplify_gen_binary (PLUS, outer_mode, inner_step, outer_step);
920 iv->mode = inner_mode;
921 iv->extend_mode = outer_mode;
922 iv->extend = extend;
923 iv->mult = const1_rtx;
924 iv->delta = outer_step;
925 iv->first_special = inner_mode != outer_mode;
927 end:
928 if (dump_file)
930 fprintf (dump_file, " ");
931 dump_iv_info (dump_file, iv);
932 fprintf (dump_file, "\n");
935 record_biv (def, iv);
936 return iv->base != NULL_RTX;
939 /* Analyzes expression RHS used at INSN and stores the result to *IV.
940 The mode of the induction variable is MODE. */
942 bool
943 iv_analyze_expr (rtx_insn *insn, rtx rhs, machine_mode mode,
944 struct rtx_iv *iv)
946 rtx mby = NULL_RTX;
947 rtx op0 = NULL_RTX, op1 = NULL_RTX;
948 struct rtx_iv iv0, iv1;
949 enum rtx_code code = GET_CODE (rhs);
950 machine_mode omode = mode;
952 iv->mode = VOIDmode;
953 iv->base = NULL_RTX;
954 iv->step = NULL_RTX;
956 gcc_assert (GET_MODE (rhs) == mode || GET_MODE (rhs) == VOIDmode);
958 if (CONSTANT_P (rhs)
959 || REG_P (rhs)
960 || code == SUBREG)
962 if (!iv_analyze_op (insn, rhs, iv))
963 return false;
965 if (iv->mode == VOIDmode)
967 iv->mode = mode;
968 iv->extend_mode = mode;
971 return true;
974 switch (code)
976 case REG:
977 op0 = rhs;
978 break;
980 case SIGN_EXTEND:
981 case ZERO_EXTEND:
982 case NEG:
983 op0 = XEXP (rhs, 0);
984 omode = GET_MODE (op0);
985 break;
987 case PLUS:
988 case MINUS:
989 op0 = XEXP (rhs, 0);
990 op1 = XEXP (rhs, 1);
991 break;
993 case MULT:
994 op0 = XEXP (rhs, 0);
995 mby = XEXP (rhs, 1);
996 if (!CONSTANT_P (mby))
997 std::swap (op0, mby);
998 if (!CONSTANT_P (mby))
999 return false;
1000 break;
1002 case ASHIFT:
1003 op0 = XEXP (rhs, 0);
1004 mby = XEXP (rhs, 1);
1005 if (!CONSTANT_P (mby))
1006 return false;
1007 break;
1009 default:
1010 return false;
1013 if (op0
1014 && !iv_analyze_expr (insn, op0, omode, &iv0))
1015 return false;
1017 if (op1
1018 && !iv_analyze_expr (insn, op1, omode, &iv1))
1019 return false;
1021 switch (code)
1023 case SIGN_EXTEND:
1024 if (!iv_extend (&iv0, IV_SIGN_EXTEND, mode))
1025 return false;
1026 break;
1028 case ZERO_EXTEND:
1029 if (!iv_extend (&iv0, IV_ZERO_EXTEND, mode))
1030 return false;
1031 break;
1033 case NEG:
1034 if (!iv_neg (&iv0))
1035 return false;
1036 break;
1038 case PLUS:
1039 case MINUS:
1040 if (!iv_add (&iv0, &iv1, code))
1041 return false;
1042 break;
1044 case MULT:
1045 if (!iv_mult (&iv0, mby))
1046 return false;
1047 break;
1049 case ASHIFT:
1050 if (!iv_shift (&iv0, mby))
1051 return false;
1052 break;
1054 default:
1055 break;
1058 *iv = iv0;
1059 return iv->base != NULL_RTX;
1062 /* Analyzes iv DEF and stores the result to *IV. */
1064 static bool
1065 iv_analyze_def (df_ref def, struct rtx_iv *iv)
1067 rtx_insn *insn = DF_REF_INSN (def);
1068 rtx reg = DF_REF_REG (def);
1069 rtx set, rhs;
1071 if (dump_file)
1073 fprintf (dump_file, "Analyzing def of ");
1074 print_rtl (dump_file, reg);
1075 fprintf (dump_file, " in insn ");
1076 print_rtl_single (dump_file, insn);
1079 check_iv_ref_table_size ();
1080 if (DF_REF_IV (def))
1082 if (dump_file)
1083 fprintf (dump_file, " already analysed.\n");
1084 *iv = *DF_REF_IV (def);
1085 return iv->base != NULL_RTX;
1088 iv->mode = VOIDmode;
1089 iv->base = NULL_RTX;
1090 iv->step = NULL_RTX;
1092 if (!REG_P (reg))
1093 return false;
1095 set = single_set (insn);
1096 if (!set)
1097 return false;
1099 if (!REG_P (SET_DEST (set)))
1100 return false;
1102 gcc_assert (SET_DEST (set) == reg);
1103 rhs = find_reg_equal_equiv_note (insn);
1104 if (rhs)
1105 rhs = XEXP (rhs, 0);
1106 else
1107 rhs = SET_SRC (set);
1109 iv_analyze_expr (insn, rhs, GET_MODE (reg), iv);
1110 record_iv (def, iv);
1112 if (dump_file)
1114 print_rtl (dump_file, reg);
1115 fprintf (dump_file, " in insn ");
1116 print_rtl_single (dump_file, insn);
1117 fprintf (dump_file, " is ");
1118 dump_iv_info (dump_file, iv);
1119 fprintf (dump_file, "\n");
1122 return iv->base != NULL_RTX;
1125 /* Analyzes operand OP of INSN and stores the result to *IV. */
1127 static bool
1128 iv_analyze_op (rtx_insn *insn, rtx op, struct rtx_iv *iv)
1130 df_ref def = NULL;
1131 enum iv_grd_result res;
1133 if (dump_file)
1135 fprintf (dump_file, "Analyzing operand ");
1136 print_rtl (dump_file, op);
1137 fprintf (dump_file, " of insn ");
1138 print_rtl_single (dump_file, insn);
1141 if (function_invariant_p (op))
1142 res = GRD_INVARIANT;
1143 else if (GET_CODE (op) == SUBREG)
1145 if (!subreg_lowpart_p (op))
1146 return false;
1148 if (!iv_analyze_op (insn, SUBREG_REG (op), iv))
1149 return false;
1151 return iv_subreg (iv, GET_MODE (op));
1153 else
1155 res = iv_get_reaching_def (insn, op, &def);
1156 if (res == GRD_INVALID)
1158 if (dump_file)
1159 fprintf (dump_file, " not simple.\n");
1160 return false;
1164 if (res == GRD_INVARIANT)
1166 iv_constant (iv, op, VOIDmode);
1168 if (dump_file)
1170 fprintf (dump_file, " ");
1171 dump_iv_info (dump_file, iv);
1172 fprintf (dump_file, "\n");
1174 return true;
1177 if (res == GRD_MAYBE_BIV)
1178 return iv_analyze_biv (op, iv);
1180 return iv_analyze_def (def, iv);
1183 /* Analyzes value VAL at INSN and stores the result to *IV. */
1185 bool
1186 iv_analyze (rtx_insn *insn, rtx val, struct rtx_iv *iv)
1188 rtx reg;
1190 /* We must find the insn in which VAL is used, so that we get to UD chains.
1191 Since the function is sometimes called on the result of get_condition,
1192 this does not necessarily have to be directly INSN; scan also the
1193 following insns. */
1194 if (simple_reg_p (val))
1196 if (GET_CODE (val) == SUBREG)
1197 reg = SUBREG_REG (val);
1198 else
1199 reg = val;
1201 while (!df_find_use (insn, reg))
1202 insn = NEXT_INSN (insn);
1205 return iv_analyze_op (insn, val, iv);
1208 /* Analyzes definition of DEF in INSN and stores the result to IV. */
1210 bool
1211 iv_analyze_result (rtx_insn *insn, rtx def, struct rtx_iv *iv)
1213 df_ref adef;
1215 adef = df_find_def (insn, def);
1216 if (!adef)
1217 return false;
1219 return iv_analyze_def (adef, iv);
1222 /* Checks whether definition of register REG in INSN is a basic induction
1223 variable. IV analysis must have been initialized (via a call to
1224 iv_analysis_loop_init) for this function to produce a result. */
1226 bool
1227 biv_p (rtx_insn *insn, rtx reg)
1229 struct rtx_iv iv;
1230 df_ref def, last_def;
1232 if (!simple_reg_p (reg))
1233 return false;
1235 def = df_find_def (insn, reg);
1236 gcc_assert (def != NULL);
1237 if (!latch_dominating_def (reg, &last_def))
1238 return false;
1239 if (last_def != def)
1240 return false;
1242 if (!iv_analyze_biv (reg, &iv))
1243 return false;
1245 return iv.step != const0_rtx;
1248 /* Calculates value of IV at ITERATION-th iteration. */
1251 get_iv_value (struct rtx_iv *iv, rtx iteration)
1253 rtx val;
1255 /* We would need to generate some if_then_else patterns, and so far
1256 it is not needed anywhere. */
1257 gcc_assert (!iv->first_special);
1259 if (iv->step != const0_rtx && iteration != const0_rtx)
1260 val = simplify_gen_binary (PLUS, iv->extend_mode, iv->base,
1261 simplify_gen_binary (MULT, iv->extend_mode,
1262 iv->step, iteration));
1263 else
1264 val = iv->base;
1266 if (iv->extend_mode == iv->mode)
1267 return val;
1269 val = lowpart_subreg (iv->mode, val, iv->extend_mode);
1271 if (iv->extend == IV_UNKNOWN_EXTEND)
1272 return val;
1274 val = simplify_gen_unary (iv_extend_to_rtx_code (iv->extend),
1275 iv->extend_mode, val, iv->mode);
1276 val = simplify_gen_binary (PLUS, iv->extend_mode, iv->delta,
1277 simplify_gen_binary (MULT, iv->extend_mode,
1278 iv->mult, val));
1280 return val;
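/* A worked example of get_iv_value (hypothetical values): for an iv with
   base = 0, step = 1, mode = QImode, extend_mode = SImode,
   extend = IV_ZERO_EXTEND, delta = 0 and mult = 1, the value at iteration
   300 is zero_extend:SI (subreg:QI (0 + 300 * 1)) = 44; the QImode
   wrap-around is part of the model.  */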
1283 /* Free the data for an induction variable analysis. */
1285 void
1286 iv_analysis_done (void)
1288 if (!clean_slate)
1290 clear_iv_info ();
1291 clean_slate = true;
1292 df_finish_pass (true);
1293 delete bivs;
1294 bivs = NULL;
1295 free (iv_ref_table);
1296 iv_ref_table = NULL;
1297 iv_ref_table_size = 0;
1301 /* Computes the multiplicative inverse of X modulo (1 << MOD); X must be odd. */
1303 static uint64_t
1304 inverse (uint64_t x, int mod)
1306 uint64_t mask =
1307 ((uint64_t) 1 << (mod - 1) << 1) - 1;
1308 uint64_t rslt = 1;
1309 int i;
1311 for (i = 0; i < mod - 1; i++)
1313 rslt = (rslt * x) & mask;
1314 x = (x * x) & mask;
1317 return rslt;
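/* The loop above computes X ** (2**(MOD-1) - 1) modulo 2**MOD by repeated
   squaring; for odd X the multiplicative order divides 2**(MOD-2), so
   multiplying the result by X gives 1.  Worked instance (not from the
   sources): inverse (3, 4) = 3**7 mod 16 = 11, and indeed 3 * 11 = 33 == 1
   (mod 16).  */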
1320 /* Checks whether any register in X is in set ALT. */
1322 static bool
1323 altered_reg_used (const_rtx x, bitmap alt)
1325 subrtx_iterator::array_type array;
1326 FOR_EACH_SUBRTX (iter, array, x, NONCONST)
1328 const_rtx x = *iter;
1329 if (REG_P (x) && REGNO_REG_SET_P (alt, REGNO (x)))
1330 return true;
1332 return false;
1335 /* Marks registers altered by EXPR in set ALT. */
1337 static void
1338 mark_altered (rtx expr, const_rtx by ATTRIBUTE_UNUSED, void *alt)
1340 if (GET_CODE (expr) == SUBREG)
1341 expr = SUBREG_REG (expr);
1342 if (!REG_P (expr))
1343 return;
1345 SET_REGNO_REG_SET ((bitmap) alt, REGNO (expr));
1348 /* Checks whether RHS is simple enough to process. */
1350 static bool
1351 simple_rhs_p (rtx rhs)
1353 rtx op0, op1;
1355 if (function_invariant_p (rhs)
1356 || (REG_P (rhs) && !HARD_REGISTER_P (rhs)))
1357 return true;
1359 switch (GET_CODE (rhs))
1361 case PLUS:
1362 case MINUS:
1363 case AND:
1364 op0 = XEXP (rhs, 0);
1365 op1 = XEXP (rhs, 1);
1366 /* Allow reg OP const and reg OP reg. */
1367 if (!(REG_P (op0) && !HARD_REGISTER_P (op0))
1368 && !function_invariant_p (op0))
1369 return false;
1370 if (!(REG_P (op1) && !HARD_REGISTER_P (op1))
1371 && !function_invariant_p (op1))
1372 return false;
1374 return true;
1376 case ASHIFT:
1377 case ASHIFTRT:
1378 case LSHIFTRT:
1379 case MULT:
1380 op0 = XEXP (rhs, 0);
1381 op1 = XEXP (rhs, 1);
1382 /* Allow reg OP const. */
1383 if (!(REG_P (op0) && !HARD_REGISTER_P (op0)))
1384 return false;
1385 if (!function_invariant_p (op1))
1386 return false;
1388 return true;
1390 default:
1391 return false;
1395 /* If REGNO has a single definition, return its known value, otherwise return
1396 null. */
1398 static rtx
1399 find_single_def_src (unsigned int regno)
1401 df_ref adef;
1402 rtx set, src;
1404 for (;;)
1406 rtx note;
1407 adef = DF_REG_DEF_CHAIN (regno);
1408 if (adef == NULL || DF_REF_NEXT_REG (adef) != NULL
1409 || DF_REF_IS_ARTIFICIAL (adef))
1410 return NULL_RTX;
1412 set = single_set (DF_REF_INSN (adef));
1413 if (set == NULL || !REG_P (SET_DEST (set))
1414 || REGNO (SET_DEST (set)) != regno)
1415 return NULL_RTX;
1417 note = find_reg_equal_equiv_note (DF_REF_INSN (adef));
1419 if (note && function_invariant_p (XEXP (note, 0)))
1421 src = XEXP (note, 0);
1422 break;
1424 src = SET_SRC (set);
1426 if (REG_P (src))
1428 regno = REGNO (src);
1429 continue;
1431 break;
1433 if (!function_invariant_p (src))
1434 return NULL_RTX;
1436 return src;
1439 /* If any registers in *EXPR have a single definition, try to replace
1440 them with their known-equivalent values. */
1442 static void
1443 replace_single_def_regs (rtx *expr)
1445 subrtx_var_iterator::array_type array;
1446 repeat:
1447 FOR_EACH_SUBRTX_VAR (iter, array, *expr, NONCONST)
1449 rtx x = *iter;
1450 if (REG_P (x))
1451 if (rtx new_x = find_single_def_src (REGNO (x)))
1453 *expr = simplify_replace_rtx (*expr, x, new_x);
1454 goto repeat;
1459 /* A subroutine of simplify_using_initial_values, this function examines INSN
1460 to see if it contains a suitable set that we can use to make a replacement.
1461 If it is suitable, return true and set DEST and SRC to the lhs and rhs of
1462 the set; return false otherwise. */
1464 static bool
1465 suitable_set_for_replacement (rtx_insn *insn, rtx *dest, rtx *src)
1467 rtx set = single_set (insn);
1468 rtx lhs = NULL_RTX, rhs;
1470 if (!set)
1471 return false;
1473 lhs = SET_DEST (set);
1474 if (!REG_P (lhs))
1475 return false;
1477 rhs = find_reg_equal_equiv_note (insn);
1478 if (rhs)
1479 rhs = XEXP (rhs, 0);
1480 else
1481 rhs = SET_SRC (set);
1483 if (!simple_rhs_p (rhs))
1484 return false;
1486 *dest = lhs;
1487 *src = rhs;
1488 return true;
1491 /* Using the data returned by suitable_set_for_replacement, replace DEST
1492 with SRC in *EXPR. Also call
1493 replace_single_def_regs if the replacement changed something. */
1494 static void
1495 replace_in_expr (rtx *expr, rtx dest, rtx src)
1497 rtx old = *expr;
1498 *expr = simplify_replace_rtx (*expr, dest, src);
1499 if (old == *expr)
1500 return;
1501 replace_single_def_regs (expr);
1504 /* Checks whether A implies B. */
1506 static bool
1507 implies_p (rtx a, rtx b)
1509 rtx op0, op1, opb0, opb1;
1510 machine_mode mode;
1512 if (rtx_equal_p (a, b))
1513 return true;
1515 if (GET_CODE (a) == EQ)
1517 op0 = XEXP (a, 0);
1518 op1 = XEXP (a, 1);
1520 if (REG_P (op0)
1521 || (GET_CODE (op0) == SUBREG
1522 && REG_P (SUBREG_REG (op0))))
1524 rtx r = simplify_replace_rtx (b, op0, op1);
1525 if (r == const_true_rtx)
1526 return true;
1529 if (REG_P (op1)
1530 || (GET_CODE (op1) == SUBREG
1531 && REG_P (SUBREG_REG (op1))))
1533 rtx r = simplify_replace_rtx (b, op1, op0);
1534 if (r == const_true_rtx)
1535 return true;
1539 if (b == const_true_rtx)
1540 return true;
1542 if ((GET_RTX_CLASS (GET_CODE (a)) != RTX_COMM_COMPARE
1543 && GET_RTX_CLASS (GET_CODE (a)) != RTX_COMPARE)
1544 || (GET_RTX_CLASS (GET_CODE (b)) != RTX_COMM_COMPARE
1545 && GET_RTX_CLASS (GET_CODE (b)) != RTX_COMPARE))
1546 return false;
1548 op0 = XEXP (a, 0);
1549 op1 = XEXP (a, 1);
1550 opb0 = XEXP (b, 0);
1551 opb1 = XEXP (b, 1);
1553 mode = GET_MODE (op0);
1554 if (mode != GET_MODE (opb0))
1555 mode = VOIDmode;
1556 else if (mode == VOIDmode)
1558 mode = GET_MODE (op1);
1559 if (mode != GET_MODE (opb1))
1560 mode = VOIDmode;
1563 /* A < B implies A + 1 <= B. */
1564 if ((GET_CODE (a) == GT || GET_CODE (a) == LT)
1565 && (GET_CODE (b) == GE || GET_CODE (b) == LE))
1568 if (GET_CODE (a) == GT)
1569 std::swap (op0, op1);
1571 if (GET_CODE (b) == GE)
1572 std::swap (opb0, opb1);
1574 if (SCALAR_INT_MODE_P (mode)
1575 && rtx_equal_p (op1, opb1)
1576 && simplify_gen_binary (MINUS, mode, opb0, op0) == const1_rtx)
1577 return true;
1578 return false;
1581 /* A < B or A > B imply A != B. TODO: Likewise
1582 A + n < B implies A != B + n if neither wraps. */
1583 if (GET_CODE (b) == NE
1584 && (GET_CODE (a) == GT || GET_CODE (a) == GTU
1585 || GET_CODE (a) == LT || GET_CODE (a) == LTU))
1587 if (rtx_equal_p (op0, opb0)
1588 && rtx_equal_p (op1, opb1))
1589 return true;
1592 /* For unsigned comparisons, A != 0 implies A > 0 and A >= 1. */
1593 if (GET_CODE (a) == NE
1594 && op1 == const0_rtx)
1596 if ((GET_CODE (b) == GTU
1597 && opb1 == const0_rtx)
1598 || (GET_CODE (b) == GEU
1599 && opb1 == const1_rtx))
1600 return rtx_equal_p (op0, opb0);
1603 /* A != N is equivalent to A - (N + 1) <u -1. */
1604 if (GET_CODE (a) == NE
1605 && CONST_INT_P (op1)
1606 && GET_CODE (b) == LTU
1607 && opb1 == constm1_rtx
1608 && GET_CODE (opb0) == PLUS
1609 && CONST_INT_P (XEXP (opb0, 1))
1610 /* Avoid overflows. */
1611 && ((unsigned HOST_WIDE_INT) INTVAL (XEXP (opb0, 1))
1612 != ((unsigned HOST_WIDE_INT)1
1613 << (HOST_BITS_PER_WIDE_INT - 1)) - 1)
1614 && INTVAL (XEXP (opb0, 1)) + 1 == -INTVAL (op1))
1615 return rtx_equal_p (op0, XEXP (opb0, 0));
1617 /* Likewise, A != N implies A - N > 0. */
1618 if (GET_CODE (a) == NE
1619 && CONST_INT_P (op1))
1621 if (GET_CODE (b) == GTU
1622 && GET_CODE (opb0) == PLUS
1623 && opb1 == const0_rtx
1624 && CONST_INT_P (XEXP (opb0, 1))
1625 /* Avoid overflows. */
1626 && ((unsigned HOST_WIDE_INT) INTVAL (XEXP (opb0, 1))
1627 != ((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1)))
1628 && rtx_equal_p (XEXP (opb0, 0), op0))
1629 return INTVAL (op1) == -INTVAL (XEXP (opb0, 1));
1630 if (GET_CODE (b) == GEU
1631 && GET_CODE (opb0) == PLUS
1632 && opb1 == const1_rtx
1633 && CONST_INT_P (XEXP (opb0, 1))
1634 /* Avoid overflows. */
1635 && ((unsigned HOST_WIDE_INT) INTVAL (XEXP (opb0, 1))
1636 != ((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1)))
1637 && rtx_equal_p (XEXP (opb0, 0), op0))
1638 return INTVAL (op1) == -INTVAL (XEXP (opb0, 1));
1641 /* A >s X, where X is positive, implies A <u Y, if Y is negative. */
1642 if ((GET_CODE (a) == GT || GET_CODE (a) == GE)
1643 && CONST_INT_P (op1)
1644 && ((GET_CODE (a) == GT && op1 == constm1_rtx)
1645 || INTVAL (op1) >= 0)
1646 && GET_CODE (b) == LTU
1647 && CONST_INT_P (opb1)
1648 && rtx_equal_p (op0, opb0))
1649 return INTVAL (opb1) < 0;
1651 return false;
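/* Examples of what the rules above accept (illustrative only):
   implies_p ((ltu a b), (ne a b)) is true by the "A < B or A > B imply
   A != B" case, and implies_p ((lt x y), (le (plus x (const_int 1)) y)) is
   true by the "A < B implies A + 1 <= B" case, since opb0 - op0 simplifies
   to const1_rtx.  */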
1654 /* Canonicalizes COND so that
1656 (1) The operands are ordered according to
1657 swap_commutative_operands_p.
1658 (2) (LE x const) will be replaced with (LT x <const+1>) and similarly
1659 for GE, GEU, and LEU. */
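/* E.g. (hypothetical operands): (leu x (const_int 7)) is canonicalized to
   (ltu x (const_int 8)) and (geu x (const_int 1)) to (gtu x (const_int 0)),
   while (leu x (const_int -1)) is left alone because no larger unsigned
   constant exists.  */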
1662 canon_condition (rtx cond)
1664 rtx op0, op1;
1665 enum rtx_code code;
1666 machine_mode mode;
1668 code = GET_CODE (cond);
1669 op0 = XEXP (cond, 0);
1670 op1 = XEXP (cond, 1);
1672 if (swap_commutative_operands_p (op0, op1))
1674 code = swap_condition (code);
1675 std::swap (op0, op1);
1678 mode = GET_MODE (op0);
1679 if (mode == VOIDmode)
1680 mode = GET_MODE (op1);
1681 gcc_assert (mode != VOIDmode);
1683 if (CONST_SCALAR_INT_P (op1) && GET_MODE_CLASS (mode) != MODE_CC)
1685 rtx_mode_t const_val (op1, mode);
1687 switch (code)
1689 case LE:
1690 if (wi::ne_p (const_val, wi::max_value (mode, SIGNED)))
1692 code = LT;
1693 op1 = immed_wide_int_const (wi::add (const_val, 1), mode);
1695 break;
1697 case GE:
1698 if (wi::ne_p (const_val, wi::min_value (mode, SIGNED)))
1700 code = GT;
1701 op1 = immed_wide_int_const (wi::sub (const_val, 1), mode);
1703 break;
1705 case LEU:
1706 if (wi::ne_p (const_val, -1))
1708 code = LTU;
1709 op1 = immed_wide_int_const (wi::add (const_val, 1), mode);
1711 break;
1713 case GEU:
1714 if (wi::ne_p (const_val, 0))
1716 code = GTU;
1717 op1 = immed_wide_int_const (wi::sub (const_val, 1), mode);
1719 break;
1721 default:
1722 break;
1726 if (op0 != XEXP (cond, 0)
1727 || op1 != XEXP (cond, 1)
1728 || code != GET_CODE (cond)
1729 || GET_MODE (cond) != SImode)
1730 cond = gen_rtx_fmt_ee (code, SImode, op0, op1);
1732 return cond;
1735 /* Reverses CONDition; returns NULL if we cannot. */
1737 static rtx
1738 reversed_condition (rtx cond)
1740 enum rtx_code reversed;
1741 reversed = reversed_comparison_code (cond, NULL);
1742 if (reversed == UNKNOWN)
1743 return NULL_RTX;
1744 else
1745 return gen_rtx_fmt_ee (reversed,
1746 GET_MODE (cond), XEXP (cond, 0),
1747 XEXP (cond, 1));
1750 /* Tries to use the fact that COND holds to simplify EXPR. ALTERED is the
1751 set of altered regs. */
1753 void
1754 simplify_using_condition (rtx cond, rtx *expr, regset altered)
1756 rtx rev, reve, exp = *expr;
1758 /* If some register gets altered later, we do not really speak about its
1759 value at the time of comparison. */
1760 if (altered && altered_reg_used (cond, altered))
1761 return;
1763 if (GET_CODE (cond) == EQ
1764 && REG_P (XEXP (cond, 0)) && CONSTANT_P (XEXP (cond, 1)))
1766 *expr = simplify_replace_rtx (*expr, XEXP (cond, 0), XEXP (cond, 1));
1767 return;
1770 if (!COMPARISON_P (exp))
1771 return;
1773 rev = reversed_condition (cond);
1774 reve = reversed_condition (exp);
1776 cond = canon_condition (cond);
1777 exp = canon_condition (exp);
1778 if (rev)
1779 rev = canon_condition (rev);
1780 if (reve)
1781 reve = canon_condition (reve);
1783 if (rtx_equal_p (exp, cond))
1785 *expr = const_true_rtx;
1786 return;
1789 if (rev && rtx_equal_p (exp, rev))
1791 *expr = const0_rtx;
1792 return;
1795 if (implies_p (cond, exp))
1797 *expr = const_true_rtx;
1798 return;
1801 if (reve && implies_p (cond, reve))
1803 *expr = const0_rtx;
1804 return;
1807 /* A proof by contradiction. If *EXPR implies (not cond), *EXPR must
1808 be false. */
1809 if (rev && implies_p (exp, rev))
1811 *expr = const0_rtx;
1812 return;
1815 /* Similarly, if (not *EXPR) implies (not cond), *EXPR must be true. */
1816 if (rev && reve && implies_p (reve, rev))
1818 *expr = const_true_rtx;
1819 return;
1822 /* We would like to have some other tests here. TODO. */
1824 return;
1827 /* Use relationship between A and *B to eventually eliminate *B.
1828 OP is the operation we consider. */
1830 static void
1831 eliminate_implied_condition (enum rtx_code op, rtx a, rtx *b)
1833 switch (op)
1835 case AND:
1836 /* If A implies *B, we may replace *B by true. */
1837 if (implies_p (a, *b))
1838 *b = const_true_rtx;
1839 break;
1841 case IOR:
1842 /* If *B implies A, we may replace *B by false. */
1843 if (implies_p (*b, a))
1844 *b = const0_rtx;
1845 break;
1847 default:
1848 gcc_unreachable ();
1852 /* Eliminates the conditions in TAIL that are implied by HEAD. OP is the
1853 operation we consider. */
1855 static void
1856 eliminate_implied_conditions (enum rtx_code op, rtx *head, rtx tail)
1858 rtx elt;
1860 for (elt = tail; elt; elt = XEXP (elt, 1))
1861 eliminate_implied_condition (op, *head, &XEXP (elt, 0));
1862 for (elt = tail; elt; elt = XEXP (elt, 1))
1863 eliminate_implied_condition (op, XEXP (elt, 0), head);
1866 /* Simplifies *EXPR using initial values at the start of the LOOP. If *EXPR
1867 is a list, its elements are assumed to be combined using OP. */
1869 static void
1870 simplify_using_initial_values (struct loop *loop, enum rtx_code op, rtx *expr)
1872 bool expression_valid;
1873 rtx head, tail, last_valid_expr;
1874 rtx_expr_list *cond_list;
1875 rtx_insn *insn;
1876 rtx neutral, aggr;
1877 regset altered, this_altered;
1878 edge e;
1880 if (!*expr)
1881 return;
1883 if (CONSTANT_P (*expr))
1884 return;
1886 if (GET_CODE (*expr) == EXPR_LIST)
1888 head = XEXP (*expr, 0);
1889 tail = XEXP (*expr, 1);
1891 eliminate_implied_conditions (op, &head, tail);
1893 switch (op)
1895 case AND:
1896 neutral = const_true_rtx;
1897 aggr = const0_rtx;
1898 break;
1900 case IOR:
1901 neutral = const0_rtx;
1902 aggr = const_true_rtx;
1903 break;
1905 default:
1906 gcc_unreachable ();
1909 simplify_using_initial_values (loop, UNKNOWN, &head);
1910 if (head == aggr)
1912 XEXP (*expr, 0) = aggr;
1913 XEXP (*expr, 1) = NULL_RTX;
1914 return;
1916 else if (head == neutral)
1918 *expr = tail;
1919 simplify_using_initial_values (loop, op, expr);
1920 return;
1922 simplify_using_initial_values (loop, op, &tail);
1924 if (tail && XEXP (tail, 0) == aggr)
1926 *expr = tail;
1927 return;
1930 XEXP (*expr, 0) = head;
1931 XEXP (*expr, 1) = tail;
1932 return;
1935 gcc_assert (op == UNKNOWN);
1937 replace_single_def_regs (expr);
1938 if (CONSTANT_P (*expr))
1939 return;
1941 e = loop_preheader_edge (loop);
1942 if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
1943 return;
1945 altered = ALLOC_REG_SET (&reg_obstack);
1946 this_altered = ALLOC_REG_SET (&reg_obstack);
1948 expression_valid = true;
1949 last_valid_expr = *expr;
1950 cond_list = NULL;
1951 while (1)
1953 insn = BB_END (e->src);
1954 if (any_condjump_p (insn))
1956 rtx cond = get_condition (BB_END (e->src), NULL, false, true);
1958 if (cond && (e->flags & EDGE_FALLTHRU))
1959 cond = reversed_condition (cond);
1960 if (cond)
1962 rtx old = *expr;
1963 simplify_using_condition (cond, expr, altered);
1964 if (old != *expr)
1966 rtx note;
1967 if (CONSTANT_P (*expr))
1968 goto out;
1969 for (note = cond_list; note; note = XEXP (note, 1))
1971 simplify_using_condition (XEXP (note, 0), expr, altered);
1972 if (CONSTANT_P (*expr))
1973 goto out;
1976 cond_list = alloc_EXPR_LIST (0, cond, cond_list);
1980 FOR_BB_INSNS_REVERSE (e->src, insn)
1982 rtx src, dest;
1983 rtx old = *expr;
1985 if (!INSN_P (insn))
1986 continue;
1988 CLEAR_REG_SET (this_altered);
1989 note_stores (PATTERN (insn), mark_altered, this_altered);
1990 if (CALL_P (insn))
1992 /* Kill all call clobbered registers. */
1993 unsigned int i;
1994 hard_reg_set_iterator hrsi;
1995 EXECUTE_IF_SET_IN_HARD_REG_SET (regs_invalidated_by_call,
1996 0, i, hrsi)
1997 SET_REGNO_REG_SET (this_altered, i);
2000 if (suitable_set_for_replacement (insn, &dest, &src))
2002 rtx_expr_list **pnote, **pnote_next;
2004 replace_in_expr (expr, dest, src);
2005 if (CONSTANT_P (*expr))
2006 goto out;
2008 for (pnote = &cond_list; *pnote; pnote = pnote_next)
2010 rtx_expr_list *note = *pnote;
2011 rtx old_cond = XEXP (note, 0);
2013 pnote_next = (rtx_expr_list **)&XEXP (note, 1);
2014 replace_in_expr (&XEXP (note, 0), dest, src);
2016 /* We can no longer use a condition that has been simplified
2017 to a constant, and simplify_using_condition will abort if
2018 we try. */
2019 if (CONSTANT_P (XEXP (note, 0)))
2021 *pnote = *pnote_next;
2022 pnote_next = pnote;
2023 free_EXPR_LIST_node (note);
2025 /* Retry simplifications with this condition if either the
2026 expression or the condition changed. */
2027 else if (old_cond != XEXP (note, 0) || old != *expr)
2028 simplify_using_condition (XEXP (note, 0), expr, altered);
2031 else
2033 rtx_expr_list **pnote, **pnote_next;
2035 /* If we did not use this insn to make a replacement, any overlap
2036 between stores in this insn and our expression will cause the
2037 expression to become invalid. */
2038 if (altered_reg_used (*expr, this_altered))
2039 goto out;
2041 /* Likewise for the conditions. */
2042 for (pnote = &cond_list; *pnote; pnote = pnote_next)
2044 rtx_expr_list *note = *pnote;
2045 rtx old_cond = XEXP (note, 0);
2047 pnote_next = (rtx_expr_list **)&XEXP (note, 1);
2048 if (altered_reg_used (old_cond, this_altered))
2050 *pnote = *pnote_next;
2051 pnote_next = pnote;
2052 free_EXPR_LIST_node (note);
2057 if (CONSTANT_P (*expr))
2058 goto out;
2060 IOR_REG_SET (altered, this_altered);
2062 /* If the expression now contains regs that have been altered, we
2063 can't return it to the caller. However, it is still valid for
2064 further simplification, so keep searching to see if we can
2065 eventually turn it into a constant. */
2066 if (altered_reg_used (*expr, altered))
2067 expression_valid = false;
2068 if (expression_valid)
2069 last_valid_expr = *expr;
2072 if (!single_pred_p (e->src)
2073 || single_pred (e->src) == ENTRY_BLOCK_PTR_FOR_FN (cfun))
2074 break;
2075 e = single_pred_edge (e->src);
2078 out:
2079 free_EXPR_LIST_list (&cond_list);
2080 if (!CONSTANT_P (*expr))
2081 *expr = last_valid_expr;
2082 FREE_REG_SET (altered);
2083 FREE_REG_SET (this_altered);
2086 /* Transforms invariant IV into MODE. Adds to DESC assumptions based on
2087 the fact that IV occurs as the left operand of comparison COND and that
2088 its signedness is SIGNED_P. */
2090 static void
2091 shorten_into_mode (struct rtx_iv *iv, machine_mode mode,
2092 enum rtx_code cond, bool signed_p, struct niter_desc *desc)
2094 rtx mmin, mmax, cond_over, cond_under;
2096 get_mode_bounds (mode, signed_p, iv->extend_mode, &mmin, &mmax);
2097 cond_under = simplify_gen_relational (LT, SImode, iv->extend_mode,
2098 iv->base, mmin);
2099 cond_over = simplify_gen_relational (GT, SImode, iv->extend_mode,
2100 iv->base, mmax);
2102 switch (cond)
2104 case LE:
2105 case LT:
2106 case LEU:
2107 case LTU:
2108 if (cond_under != const0_rtx)
2109 desc->infinite =
2110 alloc_EXPR_LIST (0, cond_under, desc->infinite);
2111 if (cond_over != const0_rtx)
2112 desc->noloop_assumptions =
2113 alloc_EXPR_LIST (0, cond_over, desc->noloop_assumptions);
2114 break;
2116 case GE:
2117 case GT:
2118 case GEU:
2119 case GTU:
2120 if (cond_over != const0_rtx)
2121 desc->infinite =
2122 alloc_EXPR_LIST (0, cond_over, desc->infinite);
2123 if (cond_under != const0_rtx)
2124 desc->noloop_assumptions =
2125 alloc_EXPR_LIST (0, cond_under, desc->noloop_assumptions);
2126 break;
2128 case NE:
2129 if (cond_over != const0_rtx)
2130 desc->infinite =
2131 alloc_EXPR_LIST (0, cond_over, desc->infinite);
2132 if (cond_under != const0_rtx)
2133 desc->infinite =
2134 alloc_EXPR_LIST (0, cond_under, desc->infinite);
2135 break;
2137 default:
2138 gcc_unreachable ();
2141 iv->mode = mode;
2142 iv->extend = signed_p ? IV_SIGN_EXTEND : IV_ZERO_EXTEND;
2145 /* Transforms IV0 and IV1 compared by COND so that they are both compared as
2146 subregs of the same mode if possible (sometimes it is necessary to add
2147 some assumptions to DESC). */
2149 static bool
2150 canonicalize_iv_subregs (struct rtx_iv *iv0, struct rtx_iv *iv1,
2151 enum rtx_code cond, struct niter_desc *desc)
2153 machine_mode comp_mode;
2154 bool signed_p;
2156 /* If the ivs behave specially in the first iteration, or are
2157 added/multiplied after extending, we ignore them. */
2158 if (iv0->first_special || iv0->mult != const1_rtx || iv0->delta != const0_rtx)
2159 return false;
2160 if (iv1->first_special || iv1->mult != const1_rtx || iv1->delta != const0_rtx)
2161 return false;
2163 /* If there is some extend, it must match signedness of the comparison. */
2164 switch (cond)
2166 case LE:
2167 case LT:
2168 if (iv0->extend == IV_ZERO_EXTEND
2169 || iv1->extend == IV_ZERO_EXTEND)
2170 return false;
2171 signed_p = true;
2172 break;
2174 case LEU:
2175 case LTU:
2176 if (iv0->extend == IV_SIGN_EXTEND
2177 || iv1->extend == IV_SIGN_EXTEND)
2178 return false;
2179 signed_p = false;
2180 break;
2182 case NE:
2183 if (iv0->extend != IV_UNKNOWN_EXTEND
2184 && iv1->extend != IV_UNKNOWN_EXTEND
2185 && iv0->extend != iv1->extend)
2186 return false;
2188 signed_p = false;
2189 if (iv0->extend != IV_UNKNOWN_EXTEND)
2190 signed_p = iv0->extend == IV_SIGN_EXTEND;
2191 if (iv1->extend != IV_UNKNOWN_EXTEND)
2192 signed_p = iv1->extend == IV_SIGN_EXTEND;
2193 break;
2195 default:
2196 gcc_unreachable ();
2199 /* Values of both variables should be computed in the same mode. These
2200 might indeed be different, if we have a comparison like
2202 (compare (subreg:SI (iv0)) (subreg:SI (iv1)))
2204 and iv0 and iv1 are both ivs iterating in SI mode, but calculated
2205 in different modes. This does not seem impossible to handle, but
2206 it hardly ever occurs in practice.
2208 The only exception is the case when one of the operands is invariant.
2209 For example, Pentium 3 generates comparisons like
2210 (lt (subreg:HI (reg:SI)) 100). Here we assign HImode to 100, but we
2211 definitely do not want this to prevent the optimization. */
2212 comp_mode = iv0->extend_mode;
2213 if (GET_MODE_BITSIZE (comp_mode) < GET_MODE_BITSIZE (iv1->extend_mode))
2214 comp_mode = iv1->extend_mode;
2216 if (iv0->extend_mode != comp_mode)
2218 if (iv0->mode != iv0->extend_mode
2219 || iv0->step != const0_rtx)
2220 return false;
2222 iv0->base = simplify_gen_unary (signed_p ? SIGN_EXTEND : ZERO_EXTEND,
2223 comp_mode, iv0->base, iv0->mode);
2224 iv0->extend_mode = comp_mode;
2227 if (iv1->extend_mode != comp_mode)
2229 if (iv1->mode != iv1->extend_mode
2230 || iv1->step != const0_rtx)
2231 return false;
2233 iv1->base = simplify_gen_unary (signed_p ? SIGN_EXTEND : ZERO_EXTEND,
2234 comp_mode, iv1->base, iv1->mode);
2235 iv1->extend_mode = comp_mode;
2238 /* Check that both ivs belong to a range of a single mode. If one of the
2239 operands is an invariant, we may need to shorten it into the common
2240 mode. */
2241 if (iv0->mode == iv0->extend_mode
2242 && iv0->step == const0_rtx
2243 && iv0->mode != iv1->mode)
2244 shorten_into_mode (iv0, iv1->mode, cond, signed_p, desc);
2246 if (iv1->mode == iv1->extend_mode
2247 && iv1->step == const0_rtx
2248 && iv0->mode != iv1->mode)
2249 shorten_into_mode (iv1, iv0->mode, swap_condition (cond), signed_p, desc);
2251 if (iv0->mode != iv1->mode)
2252 return false;
2254 desc->mode = iv0->mode;
2255 desc->signed_p = signed_p;
2257 return true;
2260 /* Tries to estimate the maximum number of iterations in LOOP, and returns the
2261 result. This function is called from iv_number_of_iterations with
2262 a number of fields in DESC already filled in. OLD_NITER is the original
2263 expression for the number of iterations, before we tried to simplify it. */
2265 static uint64_t
2266 determine_max_iter (struct loop *loop, struct niter_desc *desc, rtx old_niter)
2268 rtx niter = desc->niter_expr;
2269 rtx mmin, mmax, cmp;
2270 uint64_t nmax, inc;
2271 uint64_t andmax = 0;
2273 /* We used to look for constant operand 0 of AND,
2274 but canonicalization should always make this impossible. */
2275 gcc_checking_assert (GET_CODE (niter) != AND
2276 || !CONST_INT_P (XEXP (niter, 0)));
2278 if (GET_CODE (niter) == AND
2279 && CONST_INT_P (XEXP (niter, 1)))
2281 andmax = UINTVAL (XEXP (niter, 1));
2282 niter = XEXP (niter, 0);
2285 get_mode_bounds (desc->mode, desc->signed_p, desc->mode, &mmin, &mmax);
2286 nmax = UINTVAL (mmax) - UINTVAL (mmin);
2288 if (GET_CODE (niter) == UDIV)
2290 if (!CONST_INT_P (XEXP (niter, 1)))
2291 return nmax;
2292 inc = INTVAL (XEXP (niter, 1));
2293 niter = XEXP (niter, 0);
2295 else
2296 inc = 1;
2298 /* We could use a binary search here, but for now improving the upper
2299 bound by just one eliminates one important corner case. */
2300 cmp = simplify_gen_relational (desc->signed_p ? LT : LTU, VOIDmode,
2301 desc->mode, old_niter, mmax);
2302 simplify_using_initial_values (loop, UNKNOWN, &cmp);
2303 if (cmp == const_true_rtx)
2305 nmax--;
2307 if (dump_file)
2308 fprintf (dump_file, ";; improved upper bound by one.\n");
2310 nmax /= inc;
2311 if (andmax)
2312 nmax = MIN (nmax, andmax);
2313 if (dump_file)
2314 fprintf (dump_file, ";; Determined upper bound %" PRId64".\n",
2315 nmax);
2316 return nmax;
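/* Numeric sketch (made-up values): with desc->mode = QImode and an unsigned
   count, nmax starts at 255; a niter expression of the form
   (and:QI x (const_int 15)) caps the result at 15, while
   (udiv:QI x (const_int 4)) divides it down to 63 instead.  */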
2319 /* Computes number of iterations of the CONDITION in INSN in LOOP and stores
2320 the result into DESC. Very similar to determine_number_of_iterations
2321 (basically its rtl version), complicated by things like subregs. */
2323 static void
2324 iv_number_of_iterations (struct loop *loop, rtx_insn *insn, rtx condition,
2325 struct niter_desc *desc)
2327 rtx op0, op1, delta, step, bound, may_xform, tmp, tmp0, tmp1;
2328 struct rtx_iv iv0, iv1;
2329 rtx assumption, may_not_xform;
2330 enum rtx_code cond;
2331 machine_mode mode, comp_mode;
2332 rtx mmin, mmax, mode_mmin, mode_mmax;
2333 uint64_t s, size, d, inv, max, up, down;
2334 int64_t inc, step_val;
2335 int was_sharp = false;
2336 rtx old_niter;
2337 bool step_is_pow2;
2339 /* The meaning of these assumptions is this:
2340 if !assumptions
2341 then the rest of information does not have to be valid
2342 if noloop_assumptions then the loop does not roll
2343 if infinite then this exit is never used */
2345 desc->assumptions = NULL_RTX;
2346 desc->noloop_assumptions = NULL_RTX;
2347 desc->infinite = NULL_RTX;
2348 desc->simple_p = true;
2350 desc->const_iter = false;
2351 desc->niter_expr = NULL_RTX;
2353 cond = GET_CODE (condition);
2354 gcc_assert (COMPARISON_P (condition));
2356 mode = GET_MODE (XEXP (condition, 0));
2357 if (mode == VOIDmode)
2358 mode = GET_MODE (XEXP (condition, 1));
2359 /* The constant comparisons should be folded. */
2360 gcc_assert (mode != VOIDmode);
2362 /* We only handle integers or pointers. */
2363 if (GET_MODE_CLASS (mode) != MODE_INT
2364 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
2365 goto fail;
2367 op0 = XEXP (condition, 0);
2368 if (!iv_analyze (insn, op0, &iv0))
2369 goto fail;
2370 if (iv0.extend_mode == VOIDmode)
2371 iv0.mode = iv0.extend_mode = mode;
2373 op1 = XEXP (condition, 1);
2374 if (!iv_analyze (insn, op1, &iv1))
2375 goto fail;
2376 if (iv1.extend_mode == VOIDmode)
2377 iv1.mode = iv1.extend_mode = mode;
2379 if (GET_MODE_BITSIZE (iv0.extend_mode) > HOST_BITS_PER_WIDE_INT
2380 || GET_MODE_BITSIZE (iv1.extend_mode) > HOST_BITS_PER_WIDE_INT)
2381 goto fail;
2383 /* Check condition and normalize it. */
2385 switch (cond)
2387 case GE:
2388 case GT:
2389 case GEU:
2390 case GTU:
2391 std::swap (iv0, iv1);
2392 cond = swap_condition (cond);
2393 break;
2394 case NE:
2395 case LE:
2396 case LEU:
2397 case LT:
2398 case LTU:
2399 break;
2400 default:
2401 goto fail;
2404 /* Handle extends. This is relatively nontrivial, so we only try in some
2405 easy cases, when we can canonicalize the ivs (possibly by adding some
2406 assumptions) to shape subreg (base + i * step). This function also fills
2407 in desc->mode and desc->signed_p. */
2409 if (!canonicalize_iv_subregs (&iv0, &iv1, cond, desc))
2410 goto fail;
2412 comp_mode = iv0.extend_mode;
2413 mode = iv0.mode;
2414 size = GET_MODE_PRECISION (mode);
2415 get_mode_bounds (mode, (cond == LE || cond == LT), comp_mode, &mmin, &mmax);
2416 mode_mmin = lowpart_subreg (mode, mmin, comp_mode);
2417 mode_mmax = lowpart_subreg (mode, mmax, comp_mode);
2419 if (!CONST_INT_P (iv0.step) || !CONST_INT_P (iv1.step))
2420 goto fail;
2422 /* We can take care of the case of two induction variables chasing each other
2423 if the test is NE. I have never seen a loop using it, but still it is
2424 cool. */
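/* Illustrative example: for (i = 0, j = 10; i != j; i++, j--) is handled
   by folding the two variables into a single iv with step 1 - (-1) = 2
   that has to cover the constant difference of 10, giving 5 iterations.  */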
2425 if (iv0.step != const0_rtx && iv1.step != const0_rtx)
2427 if (cond != NE)
2428 goto fail;
2430 iv0.step = simplify_gen_binary (MINUS, comp_mode, iv0.step, iv1.step);
2431 iv1.step = const0_rtx;
2434 iv0.step = lowpart_subreg (mode, iv0.step, comp_mode);
2435 iv1.step = lowpart_subreg (mode, iv1.step, comp_mode);
2437 /* This is either an infinite loop or one that ends immediately, depending
2438 on the initial values. Unswitching should remove this kind of condition. */
2439 if (iv0.step == const0_rtx && iv1.step == const0_rtx)
2440 goto fail;
2442 if (cond != NE)
2444 if (iv0.step == const0_rtx)
2445 step_val = -INTVAL (iv1.step);
2446 else
2447 step_val = INTVAL (iv0.step);
2449 /* Ignore loops of while (i-- < 10) type. */
2450 if (step_val < 0)
2451 goto fail;
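/* step_val & (step_val - 1) clears the lowest set bit, so the expression
   below is zero exactly when the (positive) step is a power of two.  */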
2453 step_is_pow2 = !(step_val & (step_val - 1));
2455 else
2457 /* We do not care whether the step is a power of two in this
2458 case. */
2459 step_is_pow2 = false;
2460 step_val = 0;
2463 /* Some more condition normalization. We must record some assumptions
2464 due to overflows. */
2465 switch (cond)
2467 case LT:
2468 case LTU:
2469 /* We want to handle only non-sharp relationals; this is easy,
2470 as in the cases where overflow would make the transformation unsafe
2471 the loop does not roll. It might seem to make more sense to handle
2472 sharp relationals instead, as NE is more similar to
2473 them, but the problem is that here the transformation would be more
2474 difficult due to possibly infinite loops. */
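/* For instance, an increasing iv tested as i < b is rewritten below into
   i <= b - 1, and the boundary case b == MIN (b == 0 for the unsigned
   variants), in which the loop body never runs, is recorded as a noloop
   assumption.  */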
2475 if (iv0.step == const0_rtx)
2477 tmp = lowpart_subreg (mode, iv0.base, comp_mode);
2478 assumption = simplify_gen_relational (EQ, SImode, mode, tmp,
2479 mode_mmax);
2480 if (assumption == const_true_rtx)
2481 goto zero_iter_simplify;
2482 iv0.base = simplify_gen_binary (PLUS, comp_mode,
2483 iv0.base, const1_rtx);
2485 else
2487 tmp = lowpart_subreg (mode, iv1.base, comp_mode);
2488 assumption = simplify_gen_relational (EQ, SImode, mode, tmp,
2489 mode_mmin);
2490 if (assumption == const_true_rtx)
2491 goto zero_iter_simplify;
2492 iv1.base = simplify_gen_binary (PLUS, comp_mode,
2493 iv1.base, constm1_rtx);
2496 if (assumption != const0_rtx)
2497 desc->noloop_assumptions =
2498 alloc_EXPR_LIST (0, assumption, desc->noloop_assumptions);
2499 cond = (cond == LT) ? LE : LEU;
2501 /* It will be useful to be able to tell the difference once more, during
2502 the LE -> NE reduction. */
2503 was_sharp = true;
2504 break;
2505 default: ;
2508 /* Take care of trivially infinite loops. */
2509 if (cond != NE)
2511 if (iv0.step == const0_rtx)
2513 tmp = lowpart_subreg (mode, iv0.base, comp_mode);
2514 if (rtx_equal_p (tmp, mode_mmin))
2516 desc->infinite =
2517 alloc_EXPR_LIST (0, const_true_rtx, NULL_RTX);
2518 /* Fill in the remaining fields somehow. */
2519 goto zero_iter_simplify;
2522 else
2524 tmp = lowpart_subreg (mode, iv1.base, comp_mode);
2525 if (rtx_equal_p (tmp, mode_mmax))
2527 desc->infinite =
2528 alloc_EXPR_LIST (0, const_true_rtx, NULL_RTX);
2529 /* Fill in the remaining fields somehow. */
2530 goto zero_iter_simplify;
2535 /* If we can, we want to handle NE conditions instead of size
2536 comparisons, as they are much more friendly (most importantly
2537 this takes care of the special handling of loops with step 1). We can
2538 do it if we first check that the upper bound is greater than or equal to
2539 the lower bound, that their difference is a constant c modulo the step,
2540 and that there is no overflow. */
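/* Illustrative example: for (i = 0; i <= 9; i += 3) has
   delta = (9 - 0) umod 3 = 0, so the bound is adjusted to
   9 - delta + 3 = 12 and the exit test becomes i != 12, which gives the
   same four iterations; the transformation is only valid because
   9 + 3 does not overflow, which is what may_xform checks.  */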
2541 if (cond != NE)
2543 if (iv0.step == const0_rtx)
2544 step = simplify_gen_unary (NEG, comp_mode, iv1.step, comp_mode);
2545 else
2546 step = iv0.step;
2547 step = lowpart_subreg (mode, step, comp_mode);
2548 delta = simplify_gen_binary (MINUS, comp_mode, iv1.base, iv0.base);
2549 delta = lowpart_subreg (mode, delta, comp_mode);
2550 delta = simplify_gen_binary (UMOD, mode, delta, step);
2551 may_xform = const0_rtx;
2552 may_not_xform = const_true_rtx;
2554 if (CONST_INT_P (delta))
2556 if (was_sharp && INTVAL (delta) == INTVAL (step) - 1)
2558 /* A special case. We have transformed a condition of the form
2559 for (i = 0; i < 4; i += 4)
2560 into
2561 for (i = 0; i <= 3; i += 4)
2562 and obviously, if the test for overflow during that transformation
2563 passed, we cannot overflow here. Most importantly, any
2564 loop with a sharp end condition and step 1 falls into this
2565 category, so handling this case specially is definitely
2566 worth the trouble. */
2567 may_xform = const_true_rtx;
2569 else if (iv0.step == const0_rtx)
2571 bound = simplify_gen_binary (PLUS, comp_mode, mmin, step);
2572 bound = simplify_gen_binary (MINUS, comp_mode, bound, delta);
2573 bound = lowpart_subreg (mode, bound, comp_mode);
2574 tmp = lowpart_subreg (mode, iv0.base, comp_mode);
2575 may_xform = simplify_gen_relational (cond, SImode, mode,
2576 bound, tmp);
2577 may_not_xform = simplify_gen_relational (reverse_condition (cond),
2578 SImode, mode,
2579 bound, tmp);
2581 else
2583 bound = simplify_gen_binary (MINUS, comp_mode, mmax, step);
2584 bound = simplify_gen_binary (PLUS, comp_mode, bound, delta);
2585 bound = lowpart_subreg (mode, bound, comp_mode);
2586 tmp = lowpart_subreg (mode, iv1.base, comp_mode);
2587 may_xform = simplify_gen_relational (cond, SImode, mode,
2588 tmp, bound);
2589 may_not_xform = simplify_gen_relational (reverse_condition (cond),
2590 SImode, mode,
2591 tmp, bound);
2595 if (may_xform != const0_rtx)
2597 /* We always perform the transformation, provided that it is not
2598 completely senseless. This is OK, as we would need this assumption
2599 to determine the number of iterations anyway. */
2600 if (may_xform != const_true_rtx)
2602 /* If the step is a power of two and the final value we have
2603 computed overflows, the cycle is infinite. Otherwise it
2604 is nontrivial to compute the number of iterations. */
2605 if (step_is_pow2)
2606 desc->infinite = alloc_EXPR_LIST (0, may_not_xform,
2607 desc->infinite);
2608 else
2609 desc->assumptions = alloc_EXPR_LIST (0, may_xform,
2610 desc->assumptions);
2613 /* We are going to lose some information about the upper bound on
2614 the number of iterations in this step, so record the information
2615 here. */
2616 inc = INTVAL (iv0.step) - INTVAL (iv1.step);
2617 if (CONST_INT_P (iv1.base))
2618 up = INTVAL (iv1.base);
2619 else
2620 up = INTVAL (mode_mmax) - inc;
2621 down = INTVAL (CONST_INT_P (iv0.base)
2622 ? iv0.base
2623 : mode_mmin);
2624 max = (up - down) / inc + 1;
2625 if (!desc->infinite
2626 && !desc->assumptions)
2627 record_niter_bound (loop, max, false, true);
2629 if (iv0.step == const0_rtx)
2631 iv0.base = simplify_gen_binary (PLUS, comp_mode, iv0.base, delta);
2632 iv0.base = simplify_gen_binary (MINUS, comp_mode, iv0.base, step);
2634 else
2636 iv1.base = simplify_gen_binary (MINUS, comp_mode, iv1.base, delta);
2637 iv1.base = simplify_gen_binary (PLUS, comp_mode, iv1.base, step);
2640 tmp0 = lowpart_subreg (mode, iv0.base, comp_mode);
2641 tmp1 = lowpart_subreg (mode, iv1.base, comp_mode);
2642 assumption = simplify_gen_relational (reverse_condition (cond),
2643 SImode, mode, tmp0, tmp1);
2644 if (assumption == const_true_rtx)
2645 goto zero_iter_simplify;
2646 else if (assumption != const0_rtx)
2647 desc->noloop_assumptions =
2648 alloc_EXPR_LIST (0, assumption, desc->noloop_assumptions);
2649 cond = NE;
2653 /* Count the number of iterations. */
2654 if (cond == NE)
2656 /* Everything we do here is just arithmetic modulo the size of the mode. This
2657 enables more involved computations of the number of iterations
2658 than in the other cases. First transform the condition into the shape
2659 s * i <> c, with s positive. */
2660 iv1.base = simplify_gen_binary (MINUS, comp_mode, iv1.base, iv0.base);
2661 iv0.base = const0_rtx;
2662 iv0.step = simplify_gen_binary (MINUS, comp_mode, iv0.step, iv1.step);
2663 iv1.step = const0_rtx;
2664 if (INTVAL (iv0.step) < 0)
2666 iv0.step = simplify_gen_unary (NEG, comp_mode, iv0.step, comp_mode);
2667 iv1.base = simplify_gen_unary (NEG, comp_mode, iv1.base, comp_mode);
2669 iv0.step = lowpart_subreg (mode, iv0.step, comp_mode);
2671 /* Let gcd (s, size of mode) = d. If d does not divide c, the loop
2672 is infinite. Otherwise, the number of iterations is
2673 (inverse(s/d) * (c/d)) mod (size of mode/d). */
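/* A worked example in an 8-bit mode: for s = 6 and c = 10 we get d = 2,
   s/d = 3 and c/d = 5; the inverse of 3 modulo 2^7 is 43, so the number
   of iterations is (43 * 5) & 127 = 87, and indeed
   6 * 87 = 522 == 10 (mod 256).  */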
2674 s = INTVAL (iv0.step); d = 1;
2675 while (s % 2 != 1)
2677 s /= 2;
2678 d *= 2;
2679 size--;
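/* BOUND is the mask 2^size - 1 for the bits that remain; it is built
   with two shifts so that the shift amount never reaches the full width
   of uint64_t (which would be undefined) when size is 64.  */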
2681 bound = GEN_INT (((uint64_t) 1 << (size - 1) << 1) - 1);
2683 tmp1 = lowpart_subreg (mode, iv1.base, comp_mode);
2684 tmp = simplify_gen_binary (UMOD, mode, tmp1, gen_int_mode (d, mode));
2685 assumption = simplify_gen_relational (NE, SImode, mode, tmp, const0_rtx);
2686 desc->infinite = alloc_EXPR_LIST (0, assumption, desc->infinite);
2688 tmp = simplify_gen_binary (UDIV, mode, tmp1, gen_int_mode (d, mode));
2689 inv = inverse (s, size);
2690 tmp = simplify_gen_binary (MULT, mode, tmp, gen_int_mode (inv, mode));
2691 desc->niter_expr = simplify_gen_binary (AND, mode, tmp, bound);
2693 else
2695 if (iv1.step == const0_rtx)
2696 /* The condition is in the shape a + s * i <= b.
2697 We must know that b + s does not overflow and a <= b + s; then we
2698 can compute the number of iterations as (b + s - a) / s. (It might
2699 seem that we could in fact be more clever about testing the b + s
2700 overflow condition using some information about b - a mod s,
2701 but it was already taken into account during the LE -> NE transform). */
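/* For example, with a = 0, b = 10 and s = 3 this gives
   (10 + 3 - 0) / 3 = 4 iterations (the iv takes the values 0, 3, 6
   and 9), valid because 10 + 3 does not overflow and 0 <= 13.  */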
2703 step = iv0.step;
2704 tmp0 = lowpart_subreg (mode, iv0.base, comp_mode);
2705 tmp1 = lowpart_subreg (mode, iv1.base, comp_mode);
2707 bound = simplify_gen_binary (MINUS, mode, mode_mmax,
2708 lowpart_subreg (mode, step,
2709 comp_mode));
2710 if (step_is_pow2)
2712 rtx t0, t1;
2714 /* If s is a power of 2, we know that the loop is infinite if
2715 a % s <= b % s and b + s overflows. */
2716 assumption = simplify_gen_relational (reverse_condition (cond),
2717 SImode, mode,
2718 tmp1, bound);
2720 t0 = simplify_gen_binary (UMOD, mode, copy_rtx (tmp0), step);
2721 t1 = simplify_gen_binary (UMOD, mode, copy_rtx (tmp1), step);
2722 tmp = simplify_gen_relational (cond, SImode, mode, t0, t1);
2723 assumption = simplify_gen_binary (AND, SImode, assumption, tmp);
2724 desc->infinite =
2725 alloc_EXPR_LIST (0, assumption, desc->infinite);
2727 else
2729 assumption = simplify_gen_relational (cond, SImode, mode,
2730 tmp1, bound);
2731 desc->assumptions =
2732 alloc_EXPR_LIST (0, assumption, desc->assumptions);
2735 tmp = simplify_gen_binary (PLUS, comp_mode, iv1.base, iv0.step);
2736 tmp = lowpart_subreg (mode, tmp, comp_mode);
2737 assumption = simplify_gen_relational (reverse_condition (cond),
2738 SImode, mode, tmp0, tmp);
2740 delta = simplify_gen_binary (PLUS, mode, tmp1, step);
2741 delta = simplify_gen_binary (MINUS, mode, delta, tmp0);
2743 else
2745 /* The condition is in the shape a <= b - s * i.
2746 We must know that a - s does not overflow and a - s <= b; then
2747 we can again compute the number of iterations as (b - (a - s)) / s. */
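/* For example, in a signed mode with a = 0, b = 10 and s = 3 this gives
   (10 - (0 - 3)) / 3 = 4 iterations (the decreasing side takes the
   values 10, 7, 4 and 1 before dropping below a), valid because
   0 - 3 = -3 does not overflow and -3 <= 10.  */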
2748 step = simplify_gen_unary (NEG, mode, iv1.step, mode);
2749 tmp0 = lowpart_subreg (mode, iv0.base, comp_mode);
2750 tmp1 = lowpart_subreg (mode, iv1.base, comp_mode);
2752 bound = simplify_gen_binary (PLUS, mode, mode_mmin,
2753 lowpart_subreg (mode, step, comp_mode));
2754 if (step_is_pow2)
2756 rtx t0, t1;
2758 /* If s is a power of 2, we know that the loop is infinite if
2759 a % s <= b % s and a - s overflows. */
2760 assumption = simplify_gen_relational (reverse_condition (cond),
2761 SImode, mode,
2762 bound, tmp0);
2764 t0 = simplify_gen_binary (UMOD, mode, copy_rtx (tmp0), step);
2765 t1 = simplify_gen_binary (UMOD, mode, copy_rtx (tmp1), step);
2766 tmp = simplify_gen_relational (cond, SImode, mode, t0, t1);
2767 assumption = simplify_gen_binary (AND, SImode, assumption, tmp);
2768 desc->infinite =
2769 alloc_EXPR_LIST (0, assumption, desc->infinite);
2771 else
2773 assumption = simplify_gen_relational (cond, SImode, mode,
2774 bound, tmp0);
2775 desc->assumptions =
2776 alloc_EXPR_LIST (0, assumption, desc->assumptions);
2779 tmp = simplify_gen_binary (PLUS, comp_mode, iv0.base, iv1.step);
2780 tmp = lowpart_subreg (mode, tmp, comp_mode);
2781 assumption = simplify_gen_relational (reverse_condition (cond),
2782 SImode, mode,
2783 tmp, tmp1);
2784 delta = simplify_gen_binary (MINUS, mode, tmp0, step);
2785 delta = simplify_gen_binary (MINUS, mode, tmp1, delta);
2787 if (assumption == const_true_rtx)
2788 goto zero_iter_simplify;
2789 else if (assumption != const0_rtx)
2790 desc->noloop_assumptions =
2791 alloc_EXPR_LIST (0, assumption, desc->noloop_assumptions);
2792 delta = simplify_gen_binary (UDIV, mode, delta, step);
2793 desc->niter_expr = delta;
2796 old_niter = desc->niter_expr;
2798 simplify_using_initial_values (loop, AND, &desc->assumptions);
2799 if (desc->assumptions
2800 && XEXP (desc->assumptions, 0) == const0_rtx)
2801 goto fail;
2802 simplify_using_initial_values (loop, IOR, &desc->noloop_assumptions);
2803 simplify_using_initial_values (loop, IOR, &desc->infinite);
2804 simplify_using_initial_values (loop, UNKNOWN, &desc->niter_expr);
2806 /* Rerun the simplification. Consider code (created by copying loop headers)
2808 i = 0;
2810 if (0 < n)
2811 {
2812 do
2813 {
2814 i++;
2815 } while (i < n);
2816 }
2818 The first pass determines that i = 0, the second pass uses it to eliminate
2819 noloop assumption. */
2821 simplify_using_initial_values (loop, AND, &desc->assumptions);
2822 if (desc->assumptions
2823 && XEXP (desc->assumptions, 0) == const0_rtx)
2824 goto fail;
2825 simplify_using_initial_values (loop, IOR, &desc->noloop_assumptions);
2826 simplify_using_initial_values (loop, IOR, &desc->infinite);
2827 simplify_using_initial_values (loop, UNKNOWN, &desc->niter_expr);
2829 if (desc->noloop_assumptions
2830 && XEXP (desc->noloop_assumptions, 0) == const_true_rtx)
2831 goto zero_iter;
2833 if (CONST_INT_P (desc->niter_expr))
2835 uint64_t val = INTVAL (desc->niter_expr);
2837 desc->const_iter = true;
2838 desc->niter = val & GET_MODE_MASK (desc->mode);
2839 if (!desc->infinite
2840 && !desc->assumptions)
2841 record_niter_bound (loop, desc->niter, false, true);
2843 else
2845 max = determine_max_iter (loop, desc, old_niter);
2846 if (!max)
2847 goto zero_iter_simplify;
2848 if (!desc->infinite
2849 && !desc->assumptions)
2850 record_niter_bound (loop, max, false, true);
2852 /* simplify_using_initial_values does a copy propagation on the registers
2853 in the expression for the number of iterations. This prolongs the live
2854 ranges of registers and increases register pressure, and usually
2855 brings no gain (and if it happens to, the cse pass will take care
2856 of it anyway). So prevent this behavior, unless it enabled us to
2857 derive that the number of iterations is a constant. */
2858 desc->niter_expr = old_niter;
2861 return;
2863 zero_iter_simplify:
2864 /* Simplify the assumptions. */
2865 simplify_using_initial_values (loop, AND, &desc->assumptions);
2866 if (desc->assumptions
2867 && XEXP (desc->assumptions, 0) == const0_rtx)
2868 goto fail;
2869 simplify_using_initial_values (loop, IOR, &desc->infinite);
2871 /* Fallthru. */
2872 zero_iter:
2873 desc->const_iter = true;
2874 desc->niter = 0;
2875 record_niter_bound (loop, 0, true, true);
2876 desc->noloop_assumptions = NULL_RTX;
2877 desc->niter_expr = const0_rtx;
2878 return;
2880 fail:
2881 desc->simple_p = false;
2882 return;
2885 /* Checks whether E is a simple exit from LOOP and stores its description
2886 into DESC. */
2888 static void
2889 check_simple_exit (struct loop *loop, edge e, struct niter_desc *desc)
2891 basic_block exit_bb;
2892 rtx condition;
2893 rtx_insn *at;
2894 edge ein;
2896 exit_bb = e->src;
2897 desc->simple_p = false;
2899 /* It must belong directly to the loop. */
2900 if (exit_bb->loop_father != loop)
2901 return;
2903 /* It must be tested (at least) once during any iteration. */
2904 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit_bb))
2905 return;
2907 /* It must end in a simple conditional jump. */
2908 if (!any_condjump_p (BB_END (exit_bb)))
2909 return;
2911 ein = EDGE_SUCC (exit_bb, 0);
2912 if (ein == e)
2913 ein = EDGE_SUCC (exit_bb, 1);
2915 desc->out_edge = e;
2916 desc->in_edge = ein;
2918 /* Test whether the condition is suitable. */
2919 if (!(condition = get_condition (BB_END (ein->src), &at, false, false)))
2920 return;
2922 if (ein->flags & EDGE_FALLTHRU)
2924 condition = reversed_condition (condition);
2925 if (!condition)
2926 return;
2929 /* Check that we are able to determine the number of iterations and fill
2930 in information about it. */
2931 iv_number_of_iterations (loop, at, condition, desc);
2934 /* Finds a simple exit of LOOP and stores its description into DESC. */
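/* When several exits qualify, the search below keeps the one with the
   smaller known constant number of iterations, and never lets an exit
   that may be infinite replace one that is not.  */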
2936 void
2937 find_simple_exit (struct loop *loop, struct niter_desc *desc)
2939 unsigned i;
2940 basic_block *body;
2941 edge e;
2942 struct niter_desc act;
2943 bool any = false;
2944 edge_iterator ei;
2946 desc->simple_p = false;
2947 body = get_loop_body (loop);
2949 for (i = 0; i < loop->num_nodes; i++)
2951 FOR_EACH_EDGE (e, ei, body[i]->succs)
2953 if (flow_bb_inside_loop_p (loop, e->dest))
2954 continue;
2956 check_simple_exit (loop, e, &act);
2957 if (!act.simple_p)
2958 continue;
2960 if (!any)
2961 any = true;
2962 else
2964 /* Prefer constant iterations; the fewer, the better. */
2965 if (!act.const_iter
2966 || (desc->const_iter && act.niter >= desc->niter))
2967 continue;
2969 /* Also, if the current exit may be infinite while the old one is not,
2970 prefer the old one. */
2971 if (act.infinite && !desc->infinite)
2972 continue;
2975 *desc = act;
2979 if (dump_file)
2981 if (desc->simple_p)
2983 fprintf (dump_file, "Loop %d is simple:\n", loop->num);
2984 fprintf (dump_file, " simple exit %d -> %d\n",
2985 desc->out_edge->src->index,
2986 desc->out_edge->dest->index);
2987 if (desc->assumptions)
2989 fprintf (dump_file, " assumptions: ");
2990 print_rtl (dump_file, desc->assumptions);
2991 fprintf (dump_file, "\n");
2993 if (desc->noloop_assumptions)
2995 fprintf (dump_file, " does not roll if: ");
2996 print_rtl (dump_file, desc->noloop_assumptions);
2997 fprintf (dump_file, "\n");
2999 if (desc->infinite)
3001 fprintf (dump_file, " infinite if: ");
3002 print_rtl (dump_file, desc->infinite);
3003 fprintf (dump_file, "\n");
3006 fprintf (dump_file, " number of iterations: ");
3007 print_rtl (dump_file, desc->niter_expr);
3008 fprintf (dump_file, "\n");
3010 fprintf (dump_file, " upper bound: %li\n",
3011 (long)get_max_loop_iterations_int (loop));
3012 fprintf (dump_file, " realistic bound: %li\n",
3013 (long)get_estimated_loop_iterations_int (loop));
3015 else
3016 fprintf (dump_file, "Loop %d is not simple.\n", loop->num);
3019 free (body);
3022 /* Creates a simple loop description of LOOP if it was not computed
3023 already. */
3025 struct niter_desc *
3026 get_simple_loop_desc (struct loop *loop)
3028 struct niter_desc *desc = simple_loop_desc (loop);
3030 if (desc)
3031 return desc;
3033 /* At least desc->infinite is not always initialized by
3034 find_simple_exit. */
3035 desc = ggc_cleared_alloc<niter_desc> ();
3036 iv_analysis_loop_init (loop);
3037 find_simple_exit (loop, desc);
3038 loop->simple_loop_desc = desc;
3040 if (desc->simple_p && (desc->assumptions || desc->infinite))
3042 const char *wording;
3044 /* Assume that no overflow happens and that the loop is finite.
3045 We already warned at the tree level if we ran optimizations there. */
3046 if (!flag_tree_loop_optimize && warn_unsafe_loop_optimizations)
3048 if (desc->infinite)
3050 wording =
3051 flag_unsafe_loop_optimizations
3052 ? N_("assuming that the loop is not infinite")
3053 : N_("cannot optimize possibly infinite loops");
3054 warning (OPT_Wunsafe_loop_optimizations, "%s",
3055 gettext (wording));
3057 if (desc->assumptions)
3059 wording =
3060 flag_unsafe_loop_optimizations
3061 ? N_("assuming that the loop counter does not overflow")
3062 : N_("cannot optimize loop, the loop counter may overflow");
3063 warning (OPT_Wunsafe_loop_optimizations, "%s",
3064 gettext (wording));
3068 if (flag_unsafe_loop_optimizations)
3070 desc->assumptions = NULL_RTX;
3071 desc->infinite = NULL_RTX;
3075 return desc;
3078 /* Releases simple loop description for LOOP. */
3080 void
3081 free_simple_loop_desc (struct loop *loop)
3083 struct niter_desc *desc = simple_loop_desc (loop);
3085 if (!desc)
3086 return;
3088 ggc_free (desc);
3089 loop->simple_loop_desc = NULL;