[official-gcc.git] / gcc / loop-iv.c
1 /* Rtl-level induction variable analysis.
2 Copyright (C) 2004-2014 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published by the
8 Free Software Foundation; either version 3, or (at your option) any
9 later version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT
12 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* This is a simple analysis of induction variables of the loop. The major use
21 is for determining the number of iterations of a loop for loop unrolling,
22 doloop optimization and branch prediction. The iv information is computed
23 on demand.
25 Induction variables are analyzed by walking the use-def chains. When
26 a basic induction variable (biv) is found, it is cached in the bivs
27 hash table. When a register is proved to be a biv, its description
28 is stored in DF_REF_DATA of the def reference.
30 The analysis always works with one loop at a time -- you must call
31 iv_analysis_loop_init (loop) for it. All the other functions then work with
32 this loop. When you need to work with another loop, just call
33 iv_analysis_loop_init for it. When you no longer need iv analysis, call
34 iv_analysis_done () to clean up the memory.
36 The available functions are:
38 iv_analyze (insn, reg, iv): Stores the description of the induction variable
39 corresponding to the use of register REG in INSN to IV. Returns true if
40 REG is an induction variable in INSN, false otherwise.
41 If the use of REG is not found in INSN, the following insns are scanned
42 (so that we may call this function on the insn returned by get_condition).
43 iv_analyze_result (insn, def, iv): Stores to IV the description of the iv
44 corresponding to DEF, which is a register defined in INSN.
45 iv_analyze_expr (insn, rhs, mode, iv): Stores to IV the description of the iv
46 corresponding to expression RHS evaluated at INSN. All registers used by
47 RHS must also be used in INSN. */
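/* Editorial usage sketch, not part of the original file: a pass wanting iv
   information might drive this module roughly as follows.  FOR_EACH_LOOP and
   the insn/register walk are only illustrative assumptions borrowed from the
   RTL loop infrastructure, not functions defined here.

     struct loop *loop;
     struct rtx_iv iv;

     FOR_EACH_LOOP (loop, 0)
       {
         iv_analysis_loop_init (loop);
         ... for each interesting INSN and register REG in the loop body ...
         if (iv_analyze (insn, reg, &iv))
           ... iv.base, iv.step and iv.mode now describe REG at INSN ...
       }
     iv_analysis_done ();  */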
50 #include "config.h"
51 #include "system.h"
52 #include "coretypes.h"
53 #include "tm.h"
54 #include "rtl.h"
55 #include "hard-reg-set.h"
56 #include "obstack.h"
57 #include "predict.h"
58 #include "vec.h"
59 #include "hashtab.h"
60 #include "hash-set.h"
61 #include "machmode.h"
62 #include "input.h"
63 #include "function.h"
64 #include "dominance.h"
65 #include "cfg.h"
66 #include "basic-block.h"
67 #include "cfgloop.h"
68 #include "expr.h"
69 #include "intl.h"
70 #include "diagnostic-core.h"
71 #include "df.h"
72 #include "hash-table.h"
73 #include "dumpfile.h"
74 #include "rtl-iter.h"
76 /* Possible return values of iv_get_reaching_def. */
78 enum iv_grd_result
80 /* More than one reaching def, or reaching def that does not
81 dominate the use. */
82 GRD_INVALID,
84 /* The use is a trivial invariant of the loop, i.e. it is not changed
85 inside the loop. */
86 GRD_INVARIANT,
88 /* The use is reached by initial value and a value from the
89 previous iteration. */
90 GRD_MAYBE_BIV,
92 /* The use has single dominating def. */
93 GRD_SINGLE_DOM
96 /* Information about a biv. */
98 struct biv_entry
100 unsigned regno; /* The register of the biv. */
101 struct rtx_iv iv; /* Value of the biv. */
104 static bool clean_slate = true;
106 static unsigned int iv_ref_table_size = 0;
108 /* Table of rtx_ivs indexed by the df_ref uid field. */
109 static struct rtx_iv ** iv_ref_table;
111 /* Induction variable stored at the reference. */
112 #define DF_REF_IV(REF) iv_ref_table[DF_REF_ID (REF)]
113 #define DF_REF_IV_SET(REF, IV) iv_ref_table[DF_REF_ID (REF)] = (IV)
115 /* The current loop. */
117 static struct loop *current_loop;
119 /* Hashtable helper. */
121 struct biv_entry_hasher : typed_free_remove <biv_entry>
123 typedef biv_entry value_type;
124 typedef rtx_def compare_type;
125 static inline hashval_t hash (const value_type *);
126 static inline bool equal (const value_type *, const compare_type *);
129 /* Returns hash value for biv B. */
131 inline hashval_t
132 biv_entry_hasher::hash (const value_type *b)
134 return b->regno;
137 /* Compares biv B and register R. */
139 inline bool
140 biv_entry_hasher::equal (const value_type *b, const compare_type *r)
142 return b->regno == REGNO (r);
145 /* Bivs of the current loop. */
147 static hash_table<biv_entry_hasher> *bivs;
149 static bool iv_analyze_op (rtx_insn *, rtx, struct rtx_iv *);
151 /* Return the RTX code corresponding to the IV extend code EXTEND. */
152 static inline enum rtx_code
153 iv_extend_to_rtx_code (enum iv_extend_code extend)
155 switch (extend)
157 case IV_SIGN_EXTEND:
158 return SIGN_EXTEND;
159 case IV_ZERO_EXTEND:
160 return ZERO_EXTEND;
161 case IV_UNKNOWN_EXTEND:
162 return UNKNOWN;
164 gcc_unreachable ();
167 /* Dumps information about IV to FILE. */
169 extern void dump_iv_info (FILE *, struct rtx_iv *);
170 void
171 dump_iv_info (FILE *file, struct rtx_iv *iv)
173 if (!iv->base)
175 fprintf (file, "not simple");
176 return;
179 if (iv->step == const0_rtx
180 && !iv->first_special)
181 fprintf (file, "invariant ");
183 print_rtl (file, iv->base);
184 if (iv->step != const0_rtx)
186 fprintf (file, " + ");
187 print_rtl (file, iv->step);
188 fprintf (file, " * iteration");
190 fprintf (file, " (in %s)", GET_MODE_NAME (iv->mode));
192 if (iv->mode != iv->extend_mode)
193 fprintf (file, " %s to %s",
194 rtx_name[iv_extend_to_rtx_code (iv->extend)],
195 GET_MODE_NAME (iv->extend_mode));
197 if (iv->mult != const1_rtx)
199 fprintf (file, " * ");
200 print_rtl (file, iv->mult);
202 if (iv->delta != const0_rtx)
204 fprintf (file, " + ");
205 print_rtl (file, iv->delta);
207 if (iv->first_special)
208 fprintf (file, " (first special)");
211 /* Generates a subreg to get the least significant part of EXPR (in mode
212 INNER_MODE) to OUTER_MODE. */
215 lowpart_subreg (machine_mode outer_mode, rtx expr,
216 machine_mode inner_mode)
218 return simplify_gen_subreg (outer_mode, expr, inner_mode,
219 subreg_lowpart_offset (outer_mode, inner_mode));
222 static void
223 check_iv_ref_table_size (void)
225 if (iv_ref_table_size < DF_DEFS_TABLE_SIZE ())
227 unsigned int new_size = DF_DEFS_TABLE_SIZE () + (DF_DEFS_TABLE_SIZE () / 4);
228 iv_ref_table = XRESIZEVEC (struct rtx_iv *, iv_ref_table, new_size);
229 memset (&iv_ref_table[iv_ref_table_size], 0,
230 (new_size - iv_ref_table_size) * sizeof (struct rtx_iv *));
231 iv_ref_table_size = new_size;
236 /* Checks whether REG is a well-behaved register. */
238 static bool
239 simple_reg_p (rtx reg)
241 unsigned r;
243 if (GET_CODE (reg) == SUBREG)
245 if (!subreg_lowpart_p (reg))
246 return false;
247 reg = SUBREG_REG (reg);
250 if (!REG_P (reg))
251 return false;
253 r = REGNO (reg);
254 if (HARD_REGISTER_NUM_P (r))
255 return false;
257 if (GET_MODE_CLASS (GET_MODE (reg)) != MODE_INT)
258 return false;
260 return true;
263 /* Clears the information about ivs stored in df. */
265 static void
266 clear_iv_info (void)
268 unsigned i, n_defs = DF_DEFS_TABLE_SIZE ();
269 struct rtx_iv *iv;
271 check_iv_ref_table_size ();
272 for (i = 0; i < n_defs; i++)
274 iv = iv_ref_table[i];
275 if (iv)
277 free (iv);
278 iv_ref_table[i] = NULL;
282 bivs->empty ();
286 /* Prepare the data for an induction variable analysis of a LOOP. */
288 void
289 iv_analysis_loop_init (struct loop *loop)
291 current_loop = loop;
293 /* Clear the information from the analysis of the previous loop. */
294 if (clean_slate)
296 df_set_flags (DF_EQ_NOTES + DF_DEFER_INSN_RESCAN);
297 bivs = new hash_table<biv_entry_hasher> (10);
298 clean_slate = false;
300 else
301 clear_iv_info ();
303 /* Get rid of the ud chains before processing the rescans. Then add
304 the problem back. */
305 df_remove_problem (df_chain);
306 df_process_deferred_rescans ();
307 df_set_flags (DF_RD_PRUNE_DEAD_DEFS);
308 df_chain_add_problem (DF_UD_CHAIN);
309 df_note_add_problem ();
310 df_analyze_loop (loop);
311 if (dump_file)
312 df_dump_region (dump_file);
314 check_iv_ref_table_size ();
317 /* Finds the definition of REG that dominates the loop latch and stores
318 it to DEF. Returns false if there is not a single definition
319 dominating the latch. If REG has no definition in the loop, DEF
320 is set to NULL and true is returned. */
322 static bool
323 latch_dominating_def (rtx reg, df_ref *def)
325 df_ref single_rd = NULL, adef;
326 unsigned regno = REGNO (reg);
327 struct df_rd_bb_info *bb_info = DF_RD_BB_INFO (current_loop->latch);
329 for (adef = DF_REG_DEF_CHAIN (regno); adef; adef = DF_REF_NEXT_REG (adef))
331 if (!bitmap_bit_p (df->blocks_to_analyze, DF_REF_BBNO (adef))
332 || !bitmap_bit_p (&bb_info->out, DF_REF_ID (adef)))
333 continue;
335 /* More than one reaching definition. */
336 if (single_rd)
337 return false;
339 if (!just_once_each_iteration_p (current_loop, DF_REF_BB (adef)))
340 return false;
342 single_rd = adef;
345 *def = single_rd;
346 return true;
349 /* Gets definition of REG reaching its use in INSN and stores it to DEF. */
351 static enum iv_grd_result
352 iv_get_reaching_def (rtx_insn *insn, rtx reg, df_ref *def)
354 df_ref use, adef;
355 basic_block def_bb, use_bb;
356 rtx_insn *def_insn;
357 bool dom_p;
359 *def = NULL;
360 if (!simple_reg_p (reg))
361 return GRD_INVALID;
362 if (GET_CODE (reg) == SUBREG)
363 reg = SUBREG_REG (reg);
364 gcc_assert (REG_P (reg));
366 use = df_find_use (insn, reg);
367 gcc_assert (use != NULL);
369 if (!DF_REF_CHAIN (use))
370 return GRD_INVARIANT;
372 /* More than one reaching def. */
373 if (DF_REF_CHAIN (use)->next)
374 return GRD_INVALID;
376 adef = DF_REF_CHAIN (use)->ref;
378 /* We do not handle setting only part of the register. */
379 if (DF_REF_FLAGS (adef) & DF_REF_READ_WRITE)
380 return GRD_INVALID;
382 def_insn = DF_REF_INSN (adef);
383 def_bb = DF_REF_BB (adef);
384 use_bb = BLOCK_FOR_INSN (insn);
386 if (use_bb == def_bb)
387 dom_p = (DF_INSN_LUID (def_insn) < DF_INSN_LUID (insn));
388 else
389 dom_p = dominated_by_p (CDI_DOMINATORS, use_bb, def_bb);
391 if (dom_p)
393 *def = adef;
394 return GRD_SINGLE_DOM;
397 /* The definition does not dominate the use. This is still OK if
398 this may be a use of a biv, i.e. if the def_bb dominates loop
399 latch. */
400 if (just_once_each_iteration_p (current_loop, def_bb))
401 return GRD_MAYBE_BIV;
403 return GRD_INVALID;
406 /* Sets IV to invariant CST in MODE. Always returns true (just for
407 consistency with other iv manipulation functions that may fail). */
409 static bool
410 iv_constant (struct rtx_iv *iv, rtx cst, machine_mode mode)
412 if (mode == VOIDmode)
413 mode = GET_MODE (cst);
415 iv->mode = mode;
416 iv->base = cst;
417 iv->step = const0_rtx;
418 iv->first_special = false;
419 iv->extend = IV_UNKNOWN_EXTEND;
420 iv->extend_mode = iv->mode;
421 iv->delta = const0_rtx;
422 iv->mult = const1_rtx;
424 return true;
427 /* Evaluates application of subreg to MODE on IV. */
429 static bool
430 iv_subreg (struct rtx_iv *iv, machine_mode mode)
432 /* If iv is invariant, just calculate the new value. */
433 if (iv->step == const0_rtx
434 && !iv->first_special)
436 rtx val = get_iv_value (iv, const0_rtx);
437 val = lowpart_subreg (mode, val,
438 iv->extend == IV_UNKNOWN_EXTEND
439 ? iv->mode : iv->extend_mode);
441 iv->base = val;
442 iv->extend = IV_UNKNOWN_EXTEND;
443 iv->mode = iv->extend_mode = mode;
444 iv->delta = const0_rtx;
445 iv->mult = const1_rtx;
446 return true;
449 if (iv->extend_mode == mode)
450 return true;
452 if (GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (iv->mode))
453 return false;
455 iv->extend = IV_UNKNOWN_EXTEND;
456 iv->mode = mode;
458 iv->base = simplify_gen_binary (PLUS, iv->extend_mode, iv->delta,
459 simplify_gen_binary (MULT, iv->extend_mode,
460 iv->base, iv->mult));
461 iv->step = simplify_gen_binary (MULT, iv->extend_mode, iv->step, iv->mult);
462 iv->mult = const1_rtx;
463 iv->delta = const0_rtx;
464 iv->first_special = false;
466 return true;
469 /* Evaluates application of EXTEND to MODE on IV. */
471 static bool
472 iv_extend (struct rtx_iv *iv, enum iv_extend_code extend, machine_mode mode)
474 /* If iv is invariant, just calculate the new value. */
475 if (iv->step == const0_rtx
476 && !iv->first_special)
478 rtx val = get_iv_value (iv, const0_rtx);
479 if (iv->extend_mode != iv->mode
480 && iv->extend != IV_UNKNOWN_EXTEND
481 && iv->extend != extend)
482 val = lowpart_subreg (iv->mode, val, iv->extend_mode);
483 val = simplify_gen_unary (iv_extend_to_rtx_code (extend), mode,
484 val,
485 iv->extend == extend
486 ? iv->extend_mode : iv->mode);
487 iv->base = val;
488 iv->extend = IV_UNKNOWN_EXTEND;
489 iv->mode = iv->extend_mode = mode;
490 iv->delta = const0_rtx;
491 iv->mult = const1_rtx;
492 return true;
495 if (mode != iv->extend_mode)
496 return false;
498 if (iv->extend != IV_UNKNOWN_EXTEND
499 && iv->extend != extend)
500 return false;
502 iv->extend = extend;
504 return true;
507 /* Evaluates negation of IV. */
509 static bool
510 iv_neg (struct rtx_iv *iv)
512 if (iv->extend == IV_UNKNOWN_EXTEND)
514 iv->base = simplify_gen_unary (NEG, iv->extend_mode,
515 iv->base, iv->extend_mode);
516 iv->step = simplify_gen_unary (NEG, iv->extend_mode,
517 iv->step, iv->extend_mode);
519 else
521 iv->delta = simplify_gen_unary (NEG, iv->extend_mode,
522 iv->delta, iv->extend_mode);
523 iv->mult = simplify_gen_unary (NEG, iv->extend_mode,
524 iv->mult, iv->extend_mode);
527 return true;
530 /* Evaluates addition or subtraction (according to OP) of IV1 to IV0. */
532 static bool
533 iv_add (struct rtx_iv *iv0, struct rtx_iv *iv1, enum rtx_code op)
535 machine_mode mode;
536 rtx arg;
538 /* Extend the constant to extend_mode of the other operand if necessary. */
539 if (iv0->extend == IV_UNKNOWN_EXTEND
540 && iv0->mode == iv0->extend_mode
541 && iv0->step == const0_rtx
542 && GET_MODE_SIZE (iv0->extend_mode) < GET_MODE_SIZE (iv1->extend_mode))
544 iv0->extend_mode = iv1->extend_mode;
545 iv0->base = simplify_gen_unary (ZERO_EXTEND, iv0->extend_mode,
546 iv0->base, iv0->mode);
548 if (iv1->extend == IV_UNKNOWN_EXTEND
549 && iv1->mode == iv1->extend_mode
550 && iv1->step == const0_rtx
551 && GET_MODE_SIZE (iv1->extend_mode) < GET_MODE_SIZE (iv0->extend_mode))
553 iv1->extend_mode = iv0->extend_mode;
554 iv1->base = simplify_gen_unary (ZERO_EXTEND, iv1->extend_mode,
555 iv1->base, iv1->mode);
558 mode = iv0->extend_mode;
559 if (mode != iv1->extend_mode)
560 return false;
562 if (iv0->extend == IV_UNKNOWN_EXTEND
563 && iv1->extend == IV_UNKNOWN_EXTEND)
565 if (iv0->mode != iv1->mode)
566 return false;
568 iv0->base = simplify_gen_binary (op, mode, iv0->base, iv1->base);
569 iv0->step = simplify_gen_binary (op, mode, iv0->step, iv1->step);
571 return true;
574 /* Handle addition of constant. */
575 if (iv1->extend == IV_UNKNOWN_EXTEND
576 && iv1->mode == mode
577 && iv1->step == const0_rtx)
579 iv0->delta = simplify_gen_binary (op, mode, iv0->delta, iv1->base);
580 return true;
583 if (iv0->extend == IV_UNKNOWN_EXTEND
584 && iv0->mode == mode
585 && iv0->step == const0_rtx)
587 arg = iv0->base;
588 *iv0 = *iv1;
589 if (op == MINUS
590 && !iv_neg (iv0))
591 return false;
593 iv0->delta = simplify_gen_binary (PLUS, mode, iv0->delta, arg);
594 return true;
597 return false;
600 /* Evaluates multiplication of IV by constant CST. */
602 static bool
603 iv_mult (struct rtx_iv *iv, rtx mby)
605 machine_mode mode = iv->extend_mode;
607 if (GET_MODE (mby) != VOIDmode
608 && GET_MODE (mby) != mode)
609 return false;
611 if (iv->extend == IV_UNKNOWN_EXTEND)
613 iv->base = simplify_gen_binary (MULT, mode, iv->base, mby);
614 iv->step = simplify_gen_binary (MULT, mode, iv->step, mby);
616 else
618 iv->delta = simplify_gen_binary (MULT, mode, iv->delta, mby);
619 iv->mult = simplify_gen_binary (MULT, mode, iv->mult, mby);
622 return true;
625 /* Evaluates shift of IV by constant CST. */
627 static bool
628 iv_shift (struct rtx_iv *iv, rtx mby)
630 machine_mode mode = iv->extend_mode;
632 if (GET_MODE (mby) != VOIDmode
633 && GET_MODE (mby) != mode)
634 return false;
636 if (iv->extend == IV_UNKNOWN_EXTEND)
638 iv->base = simplify_gen_binary (ASHIFT, mode, iv->base, mby);
639 iv->step = simplify_gen_binary (ASHIFT, mode, iv->step, mby);
641 else
643 iv->delta = simplify_gen_binary (ASHIFT, mode, iv->delta, mby);
644 iv->mult = simplify_gen_binary (ASHIFT, mode, iv->mult, mby);
647 return true;
650 /* The recursive part of get_biv_step. Gets the value of the single value
651 defined by DEF with respect to the initial value of REG inside the loop,
652 in the shape described at get_biv_step. */
654 static bool
655 get_biv_step_1 (df_ref def, rtx reg,
656 rtx *inner_step, machine_mode *inner_mode,
657 enum iv_extend_code *extend, machine_mode outer_mode,
658 rtx *outer_step)
660 rtx set, rhs, op0 = NULL_RTX, op1 = NULL_RTX;
661 rtx next, nextr, tmp;
662 enum rtx_code code;
663 rtx_insn *insn = DF_REF_INSN (def);
664 df_ref next_def;
665 enum iv_grd_result res;
667 set = single_set (insn);
668 if (!set)
669 return false;
671 rhs = find_reg_equal_equiv_note (insn);
672 if (rhs)
673 rhs = XEXP (rhs, 0);
674 else
675 rhs = SET_SRC (set);
677 code = GET_CODE (rhs);
678 switch (code)
680 case SUBREG:
681 case REG:
682 next = rhs;
683 break;
685 case PLUS:
686 case MINUS:
687 op0 = XEXP (rhs, 0);
688 op1 = XEXP (rhs, 1);
690 if (code == PLUS && CONSTANT_P (op0))
692 tmp = op0; op0 = op1; op1 = tmp;
695 if (!simple_reg_p (op0)
696 || !CONSTANT_P (op1))
697 return false;
699 if (GET_MODE (rhs) != outer_mode)
701 /* ppc64 uses expressions like
703 (set x:SI (plus:SI (subreg:SI y:DI) 1)).
705 This is equivalent to
707 (set x':DI (plus:DI y:DI 1))
708 (set x:SI (subreg:SI x':DI)). */
709 if (GET_CODE (op0) != SUBREG)
710 return false;
711 if (GET_MODE (SUBREG_REG (op0)) != outer_mode)
712 return false;
715 next = op0;
716 break;
718 case SIGN_EXTEND:
719 case ZERO_EXTEND:
720 if (GET_MODE (rhs) != outer_mode)
721 return false;
723 op0 = XEXP (rhs, 0);
724 if (!simple_reg_p (op0))
725 return false;
727 next = op0;
728 break;
730 default:
731 return false;
734 if (GET_CODE (next) == SUBREG)
736 if (!subreg_lowpart_p (next))
737 return false;
739 nextr = SUBREG_REG (next);
740 if (GET_MODE (nextr) != outer_mode)
741 return false;
743 else
744 nextr = next;
746 res = iv_get_reaching_def (insn, nextr, &next_def);
748 if (res == GRD_INVALID || res == GRD_INVARIANT)
749 return false;
751 if (res == GRD_MAYBE_BIV)
753 if (!rtx_equal_p (nextr, reg))
754 return false;
756 *inner_step = const0_rtx;
757 *extend = IV_UNKNOWN_EXTEND;
758 *inner_mode = outer_mode;
759 *outer_step = const0_rtx;
761 else if (!get_biv_step_1 (next_def, reg,
762 inner_step, inner_mode, extend, outer_mode,
763 outer_step))
764 return false;
766 if (GET_CODE (next) == SUBREG)
768 machine_mode amode = GET_MODE (next);
770 if (GET_MODE_SIZE (amode) > GET_MODE_SIZE (*inner_mode))
771 return false;
773 *inner_mode = amode;
774 *inner_step = simplify_gen_binary (PLUS, outer_mode,
775 *inner_step, *outer_step);
776 *outer_step = const0_rtx;
777 *extend = IV_UNKNOWN_EXTEND;
780 switch (code)
782 case REG:
783 case SUBREG:
784 break;
786 case PLUS:
787 case MINUS:
788 if (*inner_mode == outer_mode
789 /* See comment in previous switch. */
790 || GET_MODE (rhs) != outer_mode)
791 *inner_step = simplify_gen_binary (code, outer_mode,
792 *inner_step, op1);
793 else
794 *outer_step = simplify_gen_binary (code, outer_mode,
795 *outer_step, op1);
796 break;
798 case SIGN_EXTEND:
799 case ZERO_EXTEND:
800 gcc_assert (GET_MODE (op0) == *inner_mode
801 && *extend == IV_UNKNOWN_EXTEND
802 && *outer_step == const0_rtx);
804 *extend = (code == SIGN_EXTEND) ? IV_SIGN_EXTEND : IV_ZERO_EXTEND;
805 break;
807 default:
808 return false;
811 return true;
814 /* Gets the operation on register REG inside loop, in shape
816 OUTER_STEP + EXTEND_{OUTER_MODE} (SUBREG_{INNER_MODE} (REG + INNER_STEP))
818 If the operation cannot be described in this shape, return false.
819 LAST_DEF is the definition of REG that dominates loop latch. */
821 static bool
822 get_biv_step (df_ref last_def, rtx reg, rtx *inner_step,
823 machine_mode *inner_mode, enum iv_extend_code *extend,
824 machine_mode *outer_mode, rtx *outer_step)
826 *outer_mode = GET_MODE (reg);
828 if (!get_biv_step_1 (last_def, reg,
829 inner_step, inner_mode, extend, *outer_mode,
830 outer_step))
831 return false;
833 gcc_assert ((*inner_mode == *outer_mode) != (*extend != IV_UNKNOWN_EXTEND));
834 gcc_assert (*inner_mode != *outer_mode || *outer_step == const0_rtx);
836 return true;
839 /* Records information that DEF is induction variable IV. */
841 static void
842 record_iv (df_ref def, struct rtx_iv *iv)
844 struct rtx_iv *recorded_iv = XNEW (struct rtx_iv);
846 *recorded_iv = *iv;
847 check_iv_ref_table_size ();
848 DF_REF_IV_SET (def, recorded_iv);
851 /* If DEF was already analyzed for bivness, store the description of the biv to
852 IV and return true. Otherwise return false. */
854 static bool
855 analyzed_for_bivness_p (rtx def, struct rtx_iv *iv)
857 struct biv_entry *biv = bivs->find_with_hash (def, REGNO (def));
859 if (!biv)
860 return false;
862 *iv = biv->iv;
863 return true;
866 static void
867 record_biv (rtx def, struct rtx_iv *iv)
869 struct biv_entry *biv = XNEW (struct biv_entry);
870 biv_entry **slot = bivs->find_slot_with_hash (def, REGNO (def), INSERT);
872 biv->regno = REGNO (def);
873 biv->iv = *iv;
874 gcc_assert (!*slot);
875 *slot = biv;
878 /* Determines whether DEF is a biv and if so, stores its description
879 to *IV. */
881 static bool
882 iv_analyze_biv (rtx def, struct rtx_iv *iv)
884 rtx inner_step, outer_step;
885 machine_mode inner_mode, outer_mode;
886 enum iv_extend_code extend;
887 df_ref last_def;
889 if (dump_file)
891 fprintf (dump_file, "Analyzing ");
892 print_rtl (dump_file, def);
893 fprintf (dump_file, " for bivness.\n");
896 if (!REG_P (def))
898 if (!CONSTANT_P (def))
899 return false;
901 return iv_constant (iv, def, VOIDmode);
904 if (!latch_dominating_def (def, &last_def))
906 if (dump_file)
907 fprintf (dump_file, " not simple.\n");
908 return false;
911 if (!last_def)
912 return iv_constant (iv, def, VOIDmode);
914 if (analyzed_for_bivness_p (def, iv))
916 if (dump_file)
917 fprintf (dump_file, " already analysed.\n");
918 return iv->base != NULL_RTX;
921 if (!get_biv_step (last_def, def, &inner_step, &inner_mode, &extend,
922 &outer_mode, &outer_step))
924 iv->base = NULL_RTX;
925 goto end;
928 /* Loop transforms base to es (base + inner_step) + outer_step,
929 where es means extend of subreg between inner_mode and outer_mode.
930 The corresponding induction variable is
932 es ((base - outer_step) + i * (inner_step + outer_step)) + outer_step */
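/* Editorial example, not from the original source: for a plain SImode
   counter whose only update inside the loop is
   (set (reg i) (plus:SI (reg i) (const_int 1))),
   get_biv_step returns inner_step = 1, outer_step = 0 and
   inner_mode == outer_mode == SImode, so the assignments below give
   base = i, step = 1, delta = 0, mult = 1 and first_special = false.  */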
934 iv->base = simplify_gen_binary (MINUS, outer_mode, def, outer_step);
935 iv->step = simplify_gen_binary (PLUS, outer_mode, inner_step, outer_step);
936 iv->mode = inner_mode;
937 iv->extend_mode = outer_mode;
938 iv->extend = extend;
939 iv->mult = const1_rtx;
940 iv->delta = outer_step;
941 iv->first_special = inner_mode != outer_mode;
943 end:
944 if (dump_file)
946 fprintf (dump_file, " ");
947 dump_iv_info (dump_file, iv);
948 fprintf (dump_file, "\n");
951 record_biv (def, iv);
952 return iv->base != NULL_RTX;
955 /* Analyzes expression RHS used at INSN and stores the result to *IV.
956 The mode of the induction variable is MODE. */
958 bool
959 iv_analyze_expr (rtx_insn *insn, rtx rhs, machine_mode mode,
960 struct rtx_iv *iv)
962 rtx mby = NULL_RTX, tmp;
963 rtx op0 = NULL_RTX, op1 = NULL_RTX;
964 struct rtx_iv iv0, iv1;
965 enum rtx_code code = GET_CODE (rhs);
966 machine_mode omode = mode;
968 iv->mode = VOIDmode;
969 iv->base = NULL_RTX;
970 iv->step = NULL_RTX;
972 gcc_assert (GET_MODE (rhs) == mode || GET_MODE (rhs) == VOIDmode);
974 if (CONSTANT_P (rhs)
975 || REG_P (rhs)
976 || code == SUBREG)
978 if (!iv_analyze_op (insn, rhs, iv))
979 return false;
981 if (iv->mode == VOIDmode)
983 iv->mode = mode;
984 iv->extend_mode = mode;
987 return true;
990 switch (code)
992 case REG:
993 op0 = rhs;
994 break;
996 case SIGN_EXTEND:
997 case ZERO_EXTEND:
998 case NEG:
999 op0 = XEXP (rhs, 0);
1000 omode = GET_MODE (op0);
1001 break;
1003 case PLUS:
1004 case MINUS:
1005 op0 = XEXP (rhs, 0);
1006 op1 = XEXP (rhs, 1);
1007 break;
1009 case MULT:
1010 op0 = XEXP (rhs, 0);
1011 mby = XEXP (rhs, 1);
1012 if (!CONSTANT_P (mby))
1014 tmp = op0;
1015 op0 = mby;
1016 mby = tmp;
1018 if (!CONSTANT_P (mby))
1019 return false;
1020 break;
1022 case ASHIFT:
1023 op0 = XEXP (rhs, 0);
1024 mby = XEXP (rhs, 1);
1025 if (!CONSTANT_P (mby))
1026 return false;
1027 break;
1029 default:
1030 return false;
1033 if (op0
1034 && !iv_analyze_expr (insn, op0, omode, &iv0))
1035 return false;
1037 if (op1
1038 && !iv_analyze_expr (insn, op1, omode, &iv1))
1039 return false;
1041 switch (code)
1043 case SIGN_EXTEND:
1044 if (!iv_extend (&iv0, IV_SIGN_EXTEND, mode))
1045 return false;
1046 break;
1048 case ZERO_EXTEND:
1049 if (!iv_extend (&iv0, IV_ZERO_EXTEND, mode))
1050 return false;
1051 break;
1053 case NEG:
1054 if (!iv_neg (&iv0))
1055 return false;
1056 break;
1058 case PLUS:
1059 case MINUS:
1060 if (!iv_add (&iv0, &iv1, code))
1061 return false;
1062 break;
1064 case MULT:
1065 if (!iv_mult (&iv0, mby))
1066 return false;
1067 break;
1069 case ASHIFT:
1070 if (!iv_shift (&iv0, mby))
1071 return false;
1072 break;
1074 default:
1075 break;
1078 *iv = iv0;
1079 return iv->base != NULL_RTX;
1082 /* Analyzes iv DEF and stores the result to *IV. */
1084 static bool
1085 iv_analyze_def (df_ref def, struct rtx_iv *iv)
1087 rtx_insn *insn = DF_REF_INSN (def);
1088 rtx reg = DF_REF_REG (def);
1089 rtx set, rhs;
1091 if (dump_file)
1093 fprintf (dump_file, "Analyzing def of ");
1094 print_rtl (dump_file, reg);
1095 fprintf (dump_file, " in insn ");
1096 print_rtl_single (dump_file, insn);
1099 check_iv_ref_table_size ();
1100 if (DF_REF_IV (def))
1102 if (dump_file)
1103 fprintf (dump_file, " already analysed.\n");
1104 *iv = *DF_REF_IV (def);
1105 return iv->base != NULL_RTX;
1108 iv->mode = VOIDmode;
1109 iv->base = NULL_RTX;
1110 iv->step = NULL_RTX;
1112 if (!REG_P (reg))
1113 return false;
1115 set = single_set (insn);
1116 if (!set)
1117 return false;
1119 if (!REG_P (SET_DEST (set)))
1120 return false;
1122 gcc_assert (SET_DEST (set) == reg);
1123 rhs = find_reg_equal_equiv_note (insn);
1124 if (rhs)
1125 rhs = XEXP (rhs, 0);
1126 else
1127 rhs = SET_SRC (set);
1129 iv_analyze_expr (insn, rhs, GET_MODE (reg), iv);
1130 record_iv (def, iv);
1132 if (dump_file)
1134 print_rtl (dump_file, reg);
1135 fprintf (dump_file, " in insn ");
1136 print_rtl_single (dump_file, insn);
1137 fprintf (dump_file, " is ");
1138 dump_iv_info (dump_file, iv);
1139 fprintf (dump_file, "\n");
1142 return iv->base != NULL_RTX;
1145 /* Analyzes operand OP of INSN and stores the result to *IV. */
1147 static bool
1148 iv_analyze_op (rtx_insn *insn, rtx op, struct rtx_iv *iv)
1150 df_ref def = NULL;
1151 enum iv_grd_result res;
1153 if (dump_file)
1155 fprintf (dump_file, "Analyzing operand ");
1156 print_rtl (dump_file, op);
1157 fprintf (dump_file, " of insn ");
1158 print_rtl_single (dump_file, insn);
1161 if (function_invariant_p (op))
1162 res = GRD_INVARIANT;
1163 else if (GET_CODE (op) == SUBREG)
1165 if (!subreg_lowpart_p (op))
1166 return false;
1168 if (!iv_analyze_op (insn, SUBREG_REG (op), iv))
1169 return false;
1171 return iv_subreg (iv, GET_MODE (op));
1173 else
1175 res = iv_get_reaching_def (insn, op, &def);
1176 if (res == GRD_INVALID)
1178 if (dump_file)
1179 fprintf (dump_file, " not simple.\n");
1180 return false;
1184 if (res == GRD_INVARIANT)
1186 iv_constant (iv, op, VOIDmode);
1188 if (dump_file)
1190 fprintf (dump_file, " ");
1191 dump_iv_info (dump_file, iv);
1192 fprintf (dump_file, "\n");
1194 return true;
1197 if (res == GRD_MAYBE_BIV)
1198 return iv_analyze_biv (op, iv);
1200 return iv_analyze_def (def, iv);
1203 /* Analyzes value VAL at INSN and stores the result to *IV. */
1205 bool
1206 iv_analyze (rtx_insn *insn, rtx val, struct rtx_iv *iv)
1208 rtx reg;
1210 /* We must find the insn in which VAL is used, so that we get to the UD chains.
1211 Since the function is sometimes called on the result of get_condition,
1212 this does not necessarily have to be directly INSN; scan also the
1213 following insns. */
1214 if (simple_reg_p (val))
1216 if (GET_CODE (val) == SUBREG)
1217 reg = SUBREG_REG (val);
1218 else
1219 reg = val;
1221 while (!df_find_use (insn, reg))
1222 insn = NEXT_INSN (insn);
1225 return iv_analyze_op (insn, val, iv);
1228 /* Analyzes definition of DEF in INSN and stores the result to IV. */
1230 bool
1231 iv_analyze_result (rtx_insn *insn, rtx def, struct rtx_iv *iv)
1233 df_ref adef;
1235 adef = df_find_def (insn, def);
1236 if (!adef)
1237 return false;
1239 return iv_analyze_def (adef, iv);
1242 /* Checks whether definition of register REG in INSN is a basic induction
1243 variable. IV analysis must have been initialized (via a call to
1244 iv_analysis_loop_init) for this function to produce a result. */
1246 bool
1247 biv_p (rtx_insn *insn, rtx reg)
1249 struct rtx_iv iv;
1250 df_ref def, last_def;
1252 if (!simple_reg_p (reg))
1253 return false;
1255 def = df_find_def (insn, reg);
1256 gcc_assert (def != NULL);
1257 if (!latch_dominating_def (reg, &last_def))
1258 return false;
1259 if (last_def != def)
1260 return false;
1262 if (!iv_analyze_biv (reg, &iv))
1263 return false;
1265 return iv.step != const0_rtx;
1268 /* Calculates value of IV at ITERATION-th iteration. */
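/* Editorial note: in terms of the rtx_iv fields, the value built below is
   delta + mult * extend_{extend_mode} (subreg_{mode} (base + iteration * step)),
   with the later steps skipped by the early returns: when extend_mode == mode
   only base + iteration * step is returned, and when the extend is unknown
   only its lowpart subreg is.  */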
1271 get_iv_value (struct rtx_iv *iv, rtx iteration)
1273 rtx val;
1275 /* We would need to generate some if_then_else patterns, and so far
1276 it is not needed anywhere. */
1277 gcc_assert (!iv->first_special);
1279 if (iv->step != const0_rtx && iteration != const0_rtx)
1280 val = simplify_gen_binary (PLUS, iv->extend_mode, iv->base,
1281 simplify_gen_binary (MULT, iv->extend_mode,
1282 iv->step, iteration));
1283 else
1284 val = iv->base;
1286 if (iv->extend_mode == iv->mode)
1287 return val;
1289 val = lowpart_subreg (iv->mode, val, iv->extend_mode);
1291 if (iv->extend == IV_UNKNOWN_EXTEND)
1292 return val;
1294 val = simplify_gen_unary (iv_extend_to_rtx_code (iv->extend),
1295 iv->extend_mode, val, iv->mode);
1296 val = simplify_gen_binary (PLUS, iv->extend_mode, iv->delta,
1297 simplify_gen_binary (MULT, iv->extend_mode,
1298 iv->mult, val));
1300 return val;
1303 /* Free the data for an induction variable analysis. */
1305 void
1306 iv_analysis_done (void)
1308 if (!clean_slate)
1310 clear_iv_info ();
1311 clean_slate = true;
1312 df_finish_pass (true);
1313 delete bivs;
1314 bivs = NULL;
1315 free (iv_ref_table);
1316 iv_ref_table = NULL;
1317 iv_ref_table_size = 0;
1321 /* Computes the inverse of X modulo (1 << MOD). */
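/* Editorial note on the method used below: for odd X the group of units
   modulo 2^MOD has order 2^(MOD-1), so X^(2^(MOD-1) - 1) is the inverse of X.
   The loop computes exactly that power by repeated squaring: after iteration
   I (counting from 0), RSLT == X^(2^(I+1) - 1), reduced to MOD bits by the
   mask.  */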
1323 static uint64_t
1324 inverse (uint64_t x, int mod)
1326 uint64_t mask =
1327 ((uint64_t) 1 << (mod - 1) << 1) - 1;
1328 uint64_t rslt = 1;
1329 int i;
1331 for (i = 0; i < mod - 1; i++)
1333 rslt = (rslt * x) & mask;
1334 x = (x * x) & mask;
1337 return rslt;
1340 /* Checks whether any register in X is in set ALT. */
1342 static bool
1343 altered_reg_used (const_rtx x, bitmap alt)
1345 subrtx_iterator::array_type array;
1346 FOR_EACH_SUBRTX (iter, array, x, NONCONST)
1348 const_rtx x = *iter;
1349 if (REG_P (x) && REGNO_REG_SET_P (alt, REGNO (x)))
1350 return true;
1352 return false;
1355 /* Marks registers altered by EXPR in set ALT. */
1357 static void
1358 mark_altered (rtx expr, const_rtx by ATTRIBUTE_UNUSED, void *alt)
1360 if (GET_CODE (expr) == SUBREG)
1361 expr = SUBREG_REG (expr);
1362 if (!REG_P (expr))
1363 return;
1365 SET_REGNO_REG_SET ((bitmap) alt, REGNO (expr));
1368 /* Checks whether RHS is simple enough to process. */
1370 static bool
1371 simple_rhs_p (rtx rhs)
1373 rtx op0, op1;
1375 if (function_invariant_p (rhs)
1376 || (REG_P (rhs) && !HARD_REGISTER_P (rhs)))
1377 return true;
1379 switch (GET_CODE (rhs))
1381 case PLUS:
1382 case MINUS:
1383 case AND:
1384 op0 = XEXP (rhs, 0);
1385 op1 = XEXP (rhs, 1);
1386 /* Allow reg OP const and reg OP reg. */
1387 if (!(REG_P (op0) && !HARD_REGISTER_P (op0))
1388 && !function_invariant_p (op0))
1389 return false;
1390 if (!(REG_P (op1) && !HARD_REGISTER_P (op1))
1391 && !function_invariant_p (op1))
1392 return false;
1394 return true;
1396 case ASHIFT:
1397 case ASHIFTRT:
1398 case LSHIFTRT:
1399 case MULT:
1400 op0 = XEXP (rhs, 0);
1401 op1 = XEXP (rhs, 1);
1402 /* Allow reg OP const. */
1403 if (!(REG_P (op0) && !HARD_REGISTER_P (op0)))
1404 return false;
1405 if (!function_invariant_p (op1))
1406 return false;
1408 return true;
1410 default:
1411 return false;
1415 /* If REGNO has a single definition, return its known value, otherwise return
1416 null. */
1418 static rtx
1419 find_single_def_src (unsigned int regno)
1421 df_ref adef;
1422 rtx set, src;
1424 for (;;)
1426 rtx note;
1427 adef = DF_REG_DEF_CHAIN (regno);
1428 if (adef == NULL || DF_REF_NEXT_REG (adef) != NULL
1429 || DF_REF_IS_ARTIFICIAL (adef))
1430 return NULL_RTX;
1432 set = single_set (DF_REF_INSN (adef));
1433 if (set == NULL || !REG_P (SET_DEST (set))
1434 || REGNO (SET_DEST (set)) != regno)
1435 return NULL_RTX;
1437 note = find_reg_equal_equiv_note (DF_REF_INSN (adef));
1439 if (note && function_invariant_p (XEXP (note, 0)))
1441 src = XEXP (note, 0);
1442 break;
1444 src = SET_SRC (set);
1446 if (REG_P (src))
1448 regno = REGNO (src);
1449 continue;
1451 break;
1453 if (!function_invariant_p (src))
1454 return NULL_RTX;
1456 return src;
1459 /* For any registers in *EXPR that have a single definition, try to replace
1460 them with their known-equivalent values. */
1462 static void
1463 replace_single_def_regs (rtx *expr)
1465 subrtx_var_iterator::array_type array;
1466 repeat:
1467 FOR_EACH_SUBRTX_VAR (iter, array, *expr, NONCONST)
1469 rtx x = *iter;
1470 if (REG_P (x))
1471 if (rtx new_x = find_single_def_src (REGNO (x)))
1473 *expr = simplify_replace_rtx (*expr, x, new_x);
1474 goto repeat;
1479 /* A subroutine of simplify_using_initial_values, this function examines INSN
1480 to see if it contains a suitable set that we can use to make a replacement.
1481 If it is suitable, return true and set DEST and SRC to the lhs and rhs of
1482 the set; return false otherwise. */
1484 static bool
1485 suitable_set_for_replacement (rtx_insn *insn, rtx *dest, rtx *src)
1487 rtx set = single_set (insn);
1488 rtx lhs = NULL_RTX, rhs;
1490 if (!set)
1491 return false;
1493 lhs = SET_DEST (set);
1494 if (!REG_P (lhs))
1495 return false;
1497 rhs = find_reg_equal_equiv_note (insn);
1498 if (rhs)
1499 rhs = XEXP (rhs, 0);
1500 else
1501 rhs = SET_SRC (set);
1503 if (!simple_rhs_p (rhs))
1504 return false;
1506 *dest = lhs;
1507 *src = rhs;
1508 return true;
1511 /* Using the data returned by suitable_set_for_replacement, replace DEST
1512 with SRC in *EXPR, storing the result back into *EXPR. Also call
1513 replace_single_def_regs if the replacement changed something. */
1514 static void
1515 replace_in_expr (rtx *expr, rtx dest, rtx src)
1517 rtx old = *expr;
1518 *expr = simplify_replace_rtx (*expr, dest, src);
1519 if (old == *expr)
1520 return;
1521 replace_single_def_regs (expr);
1524 /* Checks whether A implies B. */
1526 static bool
1527 implies_p (rtx a, rtx b)
1529 rtx op0, op1, opb0, opb1, r;
1530 machine_mode mode;
1532 if (rtx_equal_p (a, b))
1533 return true;
1535 if (GET_CODE (a) == EQ)
1537 op0 = XEXP (a, 0);
1538 op1 = XEXP (a, 1);
1540 if (REG_P (op0)
1541 || (GET_CODE (op0) == SUBREG
1542 && REG_P (SUBREG_REG (op0))))
1544 r = simplify_replace_rtx (b, op0, op1);
1545 if (r == const_true_rtx)
1546 return true;
1549 if (REG_P (op1)
1550 || (GET_CODE (op1) == SUBREG
1551 && REG_P (SUBREG_REG (op1))))
1553 r = simplify_replace_rtx (b, op1, op0);
1554 if (r == const_true_rtx)
1555 return true;
1559 if (b == const_true_rtx)
1560 return true;
1562 if ((GET_RTX_CLASS (GET_CODE (a)) != RTX_COMM_COMPARE
1563 && GET_RTX_CLASS (GET_CODE (a)) != RTX_COMPARE)
1564 || (GET_RTX_CLASS (GET_CODE (b)) != RTX_COMM_COMPARE
1565 && GET_RTX_CLASS (GET_CODE (b)) != RTX_COMPARE))
1566 return false;
1568 op0 = XEXP (a, 0);
1569 op1 = XEXP (a, 1);
1570 opb0 = XEXP (b, 0);
1571 opb1 = XEXP (b, 1);
1573 mode = GET_MODE (op0);
1574 if (mode != GET_MODE (opb0))
1575 mode = VOIDmode;
1576 else if (mode == VOIDmode)
1578 mode = GET_MODE (op1);
1579 if (mode != GET_MODE (opb1))
1580 mode = VOIDmode;
1583 /* A < B implies A + 1 <= B. */
1584 if ((GET_CODE (a) == GT || GET_CODE (a) == LT)
1585 && (GET_CODE (b) == GE || GET_CODE (b) == LE))
1588 if (GET_CODE (a) == GT)
1590 r = op0;
1591 op0 = op1;
1592 op1 = r;
1595 if (GET_CODE (b) == GE)
1597 r = opb0;
1598 opb0 = opb1;
1599 opb1 = r;
1602 if (SCALAR_INT_MODE_P (mode)
1603 && rtx_equal_p (op1, opb1)
1604 && simplify_gen_binary (MINUS, mode, opb0, op0) == const1_rtx)
1605 return true;
1606 return false;
1609 /* A < B or A > B imply A != B. TODO: Likewise
1610 A + n < B implies A != B + n if neither wraps. */
1611 if (GET_CODE (b) == NE
1612 && (GET_CODE (a) == GT || GET_CODE (a) == GTU
1613 || GET_CODE (a) == LT || GET_CODE (a) == LTU))
1615 if (rtx_equal_p (op0, opb0)
1616 && rtx_equal_p (op1, opb1))
1617 return true;
1620 /* For unsigned comparisons, A != 0 implies A > 0 and A >= 1. */
1621 if (GET_CODE (a) == NE
1622 && op1 == const0_rtx)
1624 if ((GET_CODE (b) == GTU
1625 && opb1 == const0_rtx)
1626 || (GET_CODE (b) == GEU
1627 && opb1 == const1_rtx))
1628 return rtx_equal_p (op0, opb0);
1631 /* A != N is equivalent to A - (N + 1) <u -1. */
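/* Editorial worked instance: with N == 3 the right-hand form reads
   A - 4 <u -1; the subtraction wraps to the all-ones value exactly when
   A == 3, so the unsigned comparison holds iff A != 3.  */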
1632 if (GET_CODE (a) == NE
1633 && CONST_INT_P (op1)
1634 && GET_CODE (b) == LTU
1635 && opb1 == constm1_rtx
1636 && GET_CODE (opb0) == PLUS
1637 && CONST_INT_P (XEXP (opb0, 1))
1638 /* Avoid overflows. */
1639 && ((unsigned HOST_WIDE_INT) INTVAL (XEXP (opb0, 1))
1640 != ((unsigned HOST_WIDE_INT)1
1641 << (HOST_BITS_PER_WIDE_INT - 1)) - 1)
1642 && INTVAL (XEXP (opb0, 1)) + 1 == -INTVAL (op1))
1643 return rtx_equal_p (op0, XEXP (opb0, 0));
1645 /* Likewise, A != N implies A - N > 0. */
1646 if (GET_CODE (a) == NE
1647 && CONST_INT_P (op1))
1649 if (GET_CODE (b) == GTU
1650 && GET_CODE (opb0) == PLUS
1651 && opb1 == const0_rtx
1652 && CONST_INT_P (XEXP (opb0, 1))
1653 /* Avoid overflows. */
1654 && ((unsigned HOST_WIDE_INT) INTVAL (XEXP (opb0, 1))
1655 != ((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1)))
1656 && rtx_equal_p (XEXP (opb0, 0), op0))
1657 return INTVAL (op1) == -INTVAL (XEXP (opb0, 1));
1658 if (GET_CODE (b) == GEU
1659 && GET_CODE (opb0) == PLUS
1660 && opb1 == const1_rtx
1661 && CONST_INT_P (XEXP (opb0, 1))
1662 /* Avoid overflows. */
1663 && ((unsigned HOST_WIDE_INT) INTVAL (XEXP (opb0, 1))
1664 != ((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1)))
1665 && rtx_equal_p (XEXP (opb0, 0), op0))
1666 return INTVAL (op1) == -INTVAL (XEXP (opb0, 1));
1669 /* A >s X, where X is positive, implies A <u Y, if Y is negative. */
1670 if ((GET_CODE (a) == GT || GET_CODE (a) == GE)
1671 && CONST_INT_P (op1)
1672 && ((GET_CODE (a) == GT && op1 == constm1_rtx)
1673 || INTVAL (op1) >= 0)
1674 && GET_CODE (b) == LTU
1675 && CONST_INT_P (opb1)
1676 && rtx_equal_p (op0, opb0))
1677 return INTVAL (opb1) < 0;
1679 return false;
1682 /* Canonicalizes COND so that
1684 (1) Operands are ordered according to
1685 swap_commutative_operands_p.
1686 (2) (LE x const) will be replaced with (LT x <const+1>) and similarly
1687 for GE, GEU, and LEU. */
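/* Editorial example: (le x (const_int 4)) becomes (lt x (const_int 5)) and
   (geu x (const_int 4)) becomes (gtu x (const_int 3)); as the code below
   shows, the rewritten condition is always produced in SImode.  */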
1690 canon_condition (rtx cond)
1692 rtx tem;
1693 rtx op0, op1;
1694 enum rtx_code code;
1695 machine_mode mode;
1697 code = GET_CODE (cond);
1698 op0 = XEXP (cond, 0);
1699 op1 = XEXP (cond, 1);
1701 if (swap_commutative_operands_p (op0, op1))
1703 code = swap_condition (code);
1704 tem = op0;
1705 op0 = op1;
1706 op1 = tem;
1709 mode = GET_MODE (op0);
1710 if (mode == VOIDmode)
1711 mode = GET_MODE (op1);
1712 gcc_assert (mode != VOIDmode);
1714 if (CONST_INT_P (op1)
1715 && GET_MODE_CLASS (mode) != MODE_CC
1716 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1718 HOST_WIDE_INT const_val = INTVAL (op1);
1719 unsigned HOST_WIDE_INT uconst_val = const_val;
1720 unsigned HOST_WIDE_INT max_val
1721 = (unsigned HOST_WIDE_INT) GET_MODE_MASK (mode);
1723 switch (code)
1725 case LE:
1726 if ((unsigned HOST_WIDE_INT) const_val != max_val >> 1)
1727 code = LT, op1 = gen_int_mode (const_val + 1, GET_MODE (op0));
1728 break;
1730 /* When cross-compiling, const_val might be sign-extended from
1731 BITS_PER_WORD to HOST_BITS_PER_WIDE_INT */
1732 case GE:
1733 if ((HOST_WIDE_INT) (const_val & max_val)
1734 != (((HOST_WIDE_INT) 1
1735 << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1))))
1736 code = GT, op1 = gen_int_mode (const_val - 1, mode);
1737 break;
1739 case LEU:
1740 if (uconst_val < max_val)
1741 code = LTU, op1 = gen_int_mode (uconst_val + 1, mode);
1742 break;
1744 case GEU:
1745 if (uconst_val != 0)
1746 code = GTU, op1 = gen_int_mode (uconst_val - 1, mode);
1747 break;
1749 default:
1750 break;
1754 if (op0 != XEXP (cond, 0)
1755 || op1 != XEXP (cond, 1)
1756 || code != GET_CODE (cond)
1757 || GET_MODE (cond) != SImode)
1758 cond = gen_rtx_fmt_ee (code, SImode, op0, op1);
1760 return cond;
1763 /* Reverses CONDition; returns NULL if we cannot. */
1765 static rtx
1766 reversed_condition (rtx cond)
1768 enum rtx_code reversed;
1769 reversed = reversed_comparison_code (cond, NULL);
1770 if (reversed == UNKNOWN)
1771 return NULL_RTX;
1772 else
1773 return gen_rtx_fmt_ee (reversed,
1774 GET_MODE (cond), XEXP (cond, 0),
1775 XEXP (cond, 1));
1778 /* Tries to use the fact that COND holds to simplify EXPR. ALTERED is the
1779 set of altered regs. */
1781 void
1782 simplify_using_condition (rtx cond, rtx *expr, regset altered)
1784 rtx rev, reve, exp = *expr;
1786 /* If some register gets altered later, we do not really speak about its
1787 value at the time of comparison. */
1788 if (altered && altered_reg_used (cond, altered))
1789 return;
1791 if (GET_CODE (cond) == EQ
1792 && REG_P (XEXP (cond, 0)) && CONSTANT_P (XEXP (cond, 1)))
1794 *expr = simplify_replace_rtx (*expr, XEXP (cond, 0), XEXP (cond, 1));
1795 return;
1798 if (!COMPARISON_P (exp))
1799 return;
1801 rev = reversed_condition (cond);
1802 reve = reversed_condition (exp);
1804 cond = canon_condition (cond);
1805 exp = canon_condition (exp);
1806 if (rev)
1807 rev = canon_condition (rev);
1808 if (reve)
1809 reve = canon_condition (reve);
1811 if (rtx_equal_p (exp, cond))
1813 *expr = const_true_rtx;
1814 return;
1817 if (rev && rtx_equal_p (exp, rev))
1819 *expr = const0_rtx;
1820 return;
1823 if (implies_p (cond, exp))
1825 *expr = const_true_rtx;
1826 return;
1829 if (reve && implies_p (cond, reve))
1831 *expr = const0_rtx;
1832 return;
1835 /* A proof by contradiction. If *EXPR implies (not cond), *EXPR must
1836 be false. */
1837 if (rev && implies_p (exp, rev))
1839 *expr = const0_rtx;
1840 return;
1843 /* Similarly, if (not *EXPR) implies (not cond), *EXPR must be true. */
1844 if (rev && reve && implies_p (reve, rev))
1846 *expr = const_true_rtx;
1847 return;
1850 /* We would like to have some other tests here. TODO. */
1852 return;
1855 /* Use relationship between A and *B to eventually eliminate *B.
1856 OP is the operation we consider. */
1858 static void
1859 eliminate_implied_condition (enum rtx_code op, rtx a, rtx *b)
1861 switch (op)
1863 case AND:
1864 /* If A implies *B, we may replace *B by true. */
1865 if (implies_p (a, *b))
1866 *b = const_true_rtx;
1867 break;
1869 case IOR:
1870 /* If *B implies A, we may replace *B by false. */
1871 if (implies_p (*b, a))
1872 *b = const0_rtx;
1873 break;
1875 default:
1876 gcc_unreachable ();
1880 /* Eliminates the conditions in TAIL that are implied by HEAD. OP is the
1881 operation we consider. */
1883 static void
1884 eliminate_implied_conditions (enum rtx_code op, rtx *head, rtx tail)
1886 rtx elt;
1888 for (elt = tail; elt; elt = XEXP (elt, 1))
1889 eliminate_implied_condition (op, *head, &XEXP (elt, 0));
1890 for (elt = tail; elt; elt = XEXP (elt, 1))
1891 eliminate_implied_condition (op, XEXP (elt, 0), head);
1894 /* Simplifies *EXPR using initial values at the start of the LOOP. If *EXPR
1895 is a list, its elements are assumed to be combined using OP. */
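/* Editorial note: for OP == AND a list (c1, c2, ...) stands for
   c1 && c2 && ...; const_true_rtx is then the neutral element and const0_rtx
   the absorbing one, which is what the "neutral" and "aggr" variables below
   encode.  For OP == IOR the two roles are swapped.  */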
1897 static void
1898 simplify_using_initial_values (struct loop *loop, enum rtx_code op, rtx *expr)
1900 bool expression_valid;
1901 rtx head, tail, last_valid_expr;
1902 rtx_expr_list *cond_list;
1903 rtx_insn *insn;
1904 rtx neutral, aggr;
1905 regset altered, this_altered;
1906 edge e;
1908 if (!*expr)
1909 return;
1911 if (CONSTANT_P (*expr))
1912 return;
1914 if (GET_CODE (*expr) == EXPR_LIST)
1916 head = XEXP (*expr, 0);
1917 tail = XEXP (*expr, 1);
1919 eliminate_implied_conditions (op, &head, tail);
1921 switch (op)
1923 case AND:
1924 neutral = const_true_rtx;
1925 aggr = const0_rtx;
1926 break;
1928 case IOR:
1929 neutral = const0_rtx;
1930 aggr = const_true_rtx;
1931 break;
1933 default:
1934 gcc_unreachable ();
1937 simplify_using_initial_values (loop, UNKNOWN, &head);
1938 if (head == aggr)
1940 XEXP (*expr, 0) = aggr;
1941 XEXP (*expr, 1) = NULL_RTX;
1942 return;
1944 else if (head == neutral)
1946 *expr = tail;
1947 simplify_using_initial_values (loop, op, expr);
1948 return;
1950 simplify_using_initial_values (loop, op, &tail);
1952 if (tail && XEXP (tail, 0) == aggr)
1954 *expr = tail;
1955 return;
1958 XEXP (*expr, 0) = head;
1959 XEXP (*expr, 1) = tail;
1960 return;
1963 gcc_assert (op == UNKNOWN);
1965 replace_single_def_regs (expr);
1966 if (CONSTANT_P (*expr))
1967 return;
1969 e = loop_preheader_edge (loop);
1970 if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
1971 return;
1973 altered = ALLOC_REG_SET (&reg_obstack);
1974 this_altered = ALLOC_REG_SET (&reg_obstack);
1976 expression_valid = true;
1977 last_valid_expr = *expr;
1978 cond_list = NULL;
1979 while (1)
1981 insn = BB_END (e->src);
1982 if (any_condjump_p (insn))
1984 rtx cond = get_condition (BB_END (e->src), NULL, false, true);
1986 if (cond && (e->flags & EDGE_FALLTHRU))
1987 cond = reversed_condition (cond);
1988 if (cond)
1990 rtx old = *expr;
1991 simplify_using_condition (cond, expr, altered);
1992 if (old != *expr)
1994 rtx note;
1995 if (CONSTANT_P (*expr))
1996 goto out;
1997 for (note = cond_list; note; note = XEXP (note, 1))
1999 simplify_using_condition (XEXP (note, 0), expr, altered);
2000 if (CONSTANT_P (*expr))
2001 goto out;
2004 cond_list = alloc_EXPR_LIST (0, cond, cond_list);
2008 FOR_BB_INSNS_REVERSE (e->src, insn)
2010 rtx src, dest;
2011 rtx old = *expr;
2013 if (!INSN_P (insn))
2014 continue;
2016 CLEAR_REG_SET (this_altered);
2017 note_stores (PATTERN (insn), mark_altered, this_altered);
2018 if (CALL_P (insn))
2020 /* Kill all call clobbered registers. */
2021 unsigned int i;
2022 hard_reg_set_iterator hrsi;
2023 EXECUTE_IF_SET_IN_HARD_REG_SET (regs_invalidated_by_call,
2024 0, i, hrsi)
2025 SET_REGNO_REG_SET (this_altered, i);
2028 if (suitable_set_for_replacement (insn, &dest, &src))
2030 rtx_expr_list **pnote, **pnote_next;
2032 replace_in_expr (expr, dest, src);
2033 if (CONSTANT_P (*expr))
2034 goto out;
2036 for (pnote = &cond_list; *pnote; pnote = pnote_next)
2038 rtx note = *pnote;
2039 rtx old_cond = XEXP (note, 0);
2041 pnote_next = (rtx_expr_list **)&XEXP (note, 1);
2042 replace_in_expr (&XEXP (note, 0), dest, src);
2044 /* We can no longer use a condition that has been simplified
2045 to a constant, and simplify_using_condition will abort if
2046 we try. */
2047 if (CONSTANT_P (XEXP (note, 0)))
2049 *pnote = *pnote_next;
2050 pnote_next = pnote;
2051 free_EXPR_LIST_node (note);
2053 /* Retry simplifications with this condition if either the
2054 expression or the condition changed. */
2055 else if (old_cond != XEXP (note, 0) || old != *expr)
2056 simplify_using_condition (XEXP (note, 0), expr, altered);
2059 else
2061 rtx_expr_list **pnote, **pnote_next;
2063 /* If we did not use this insn to make a replacement, any overlap
2064 between stores in this insn and our expression will cause the
2065 expression to become invalid. */
2066 if (altered_reg_used (*expr, this_altered))
2067 goto out;
2069 /* Likewise for the conditions. */
2070 for (pnote = &cond_list; *pnote; pnote = pnote_next)
2072 rtx note = *pnote;
2073 rtx old_cond = XEXP (note, 0);
2075 pnote_next = (rtx_expr_list **)&XEXP (note, 1);
2076 if (altered_reg_used (old_cond, this_altered))
2078 *pnote = *pnote_next;
2079 pnote_next = pnote;
2080 free_EXPR_LIST_node (note);
2085 if (CONSTANT_P (*expr))
2086 goto out;
2088 IOR_REG_SET (altered, this_altered);
2090 /* If the expression now contains regs that have been altered, we
2091 can't return it to the caller. However, it is still valid for
2092 further simplification, so keep searching to see if we can
2093 eventually turn it into a constant. */
2094 if (altered_reg_used (*expr, altered))
2095 expression_valid = false;
2096 if (expression_valid)
2097 last_valid_expr = *expr;
2100 if (!single_pred_p (e->src)
2101 || single_pred (e->src) == ENTRY_BLOCK_PTR_FOR_FN (cfun))
2102 break;
2103 e = single_pred_edge (e->src);
2106 out:
2107 free_EXPR_LIST_list (&cond_list);
2108 if (!CONSTANT_P (*expr))
2109 *expr = last_valid_expr;
2110 FREE_REG_SET (altered);
2111 FREE_REG_SET (this_altered);
2114 /* Transforms invariant IV into MODE. Adds to DESC assumptions based on
2115 the fact that IV occurs as the left operand of comparison COND and that
2116 its signedness is SIGNED_P. */
2118 static void
2119 shorten_into_mode (struct rtx_iv *iv, machine_mode mode,
2120 enum rtx_code cond, bool signed_p, struct niter_desc *desc)
2122 rtx mmin, mmax, cond_over, cond_under;
2124 get_mode_bounds (mode, signed_p, iv->extend_mode, &mmin, &mmax);
2125 cond_under = simplify_gen_relational (LT, SImode, iv->extend_mode,
2126 iv->base, mmin);
2127 cond_over = simplify_gen_relational (GT, SImode, iv->extend_mode,
2128 iv->base, mmax);
2130 switch (cond)
2132 case LE:
2133 case LT:
2134 case LEU:
2135 case LTU:
2136 if (cond_under != const0_rtx)
2137 desc->infinite =
2138 alloc_EXPR_LIST (0, cond_under, desc->infinite);
2139 if (cond_over != const0_rtx)
2140 desc->noloop_assumptions =
2141 alloc_EXPR_LIST (0, cond_over, desc->noloop_assumptions);
2142 break;
2144 case GE:
2145 case GT:
2146 case GEU:
2147 case GTU:
2148 if (cond_over != const0_rtx)
2149 desc->infinite =
2150 alloc_EXPR_LIST (0, cond_over, desc->infinite);
2151 if (cond_under != const0_rtx)
2152 desc->noloop_assumptions =
2153 alloc_EXPR_LIST (0, cond_under, desc->noloop_assumptions);
2154 break;
2156 case NE:
2157 if (cond_over != const0_rtx)
2158 desc->infinite =
2159 alloc_EXPR_LIST (0, cond_over, desc->infinite);
2160 if (cond_under != const0_rtx)
2161 desc->infinite =
2162 alloc_EXPR_LIST (0, cond_under, desc->infinite);
2163 break;
2165 default:
2166 gcc_unreachable ();
2169 iv->mode = mode;
2170 iv->extend = signed_p ? IV_SIGN_EXTEND : IV_ZERO_EXTEND;
2173 /* Transforms IV0 and IV1 compared by COND so that they are both compared as
2174 subregs of the same mode if possible (sometimes it is necessary to add
2175 some assumptions to DESC). */
2177 static bool
2178 canonicalize_iv_subregs (struct rtx_iv *iv0, struct rtx_iv *iv1,
2179 enum rtx_code cond, struct niter_desc *desc)
2181 machine_mode comp_mode;
2182 bool signed_p;
2184 /* If the ivs behave specially in the first iteration, or are
2185 added/multiplied after extending, we ignore them. */
2186 if (iv0->first_special || iv0->mult != const1_rtx || iv0->delta != const0_rtx)
2187 return false;
2188 if (iv1->first_special || iv1->mult != const1_rtx || iv1->delta != const0_rtx)
2189 return false;
2191 /* If there is some extend, it must match signedness of the comparison. */
2192 switch (cond)
2194 case LE:
2195 case LT:
2196 if (iv0->extend == IV_ZERO_EXTEND
2197 || iv1->extend == IV_ZERO_EXTEND)
2198 return false;
2199 signed_p = true;
2200 break;
2202 case LEU:
2203 case LTU:
2204 if (iv0->extend == IV_SIGN_EXTEND
2205 || iv1->extend == IV_SIGN_EXTEND)
2206 return false;
2207 signed_p = false;
2208 break;
2210 case NE:
2211 if (iv0->extend != IV_UNKNOWN_EXTEND
2212 && iv1->extend != IV_UNKNOWN_EXTEND
2213 && iv0->extend != iv1->extend)
2214 return false;
2216 signed_p = false;
2217 if (iv0->extend != IV_UNKNOWN_EXTEND)
2218 signed_p = iv0->extend == IV_SIGN_EXTEND;
2219 if (iv1->extend != IV_UNKNOWN_EXTEND)
2220 signed_p = iv1->extend == IV_SIGN_EXTEND;
2221 break;
2223 default:
2224 gcc_unreachable ();
2227 /* Values of both variables should be computed in the same mode. These
2228 might indeed be different, if we have comparison like
2230 (compare (subreg:SI (iv0)) (subreg:SI (iv1)))
2232 and iv0 and iv1 are both ivs iterating in SI mode, but calculated
2233 in different modes. This does not seem impossible to handle, but
2234 it hardly ever occurs in practice.
2236 The only exception is the case when one of operands is invariant.
2237 For example pentium 3 generates comparisons like
2238 (lt (subreg:HI (reg:SI)) 100). Here we assign HImode to 100, but we
2239 definitely do not want this to prevent the optimization. */
2240 comp_mode = iv0->extend_mode;
2241 if (GET_MODE_BITSIZE (comp_mode) < GET_MODE_BITSIZE (iv1->extend_mode))
2242 comp_mode = iv1->extend_mode;
2244 if (iv0->extend_mode != comp_mode)
2246 if (iv0->mode != iv0->extend_mode
2247 || iv0->step != const0_rtx)
2248 return false;
2250 iv0->base = simplify_gen_unary (signed_p ? SIGN_EXTEND : ZERO_EXTEND,
2251 comp_mode, iv0->base, iv0->mode);
2252 iv0->extend_mode = comp_mode;
2255 if (iv1->extend_mode != comp_mode)
2257 if (iv1->mode != iv1->extend_mode
2258 || iv1->step != const0_rtx)
2259 return false;
2261 iv1->base = simplify_gen_unary (signed_p ? SIGN_EXTEND : ZERO_EXTEND,
2262 comp_mode, iv1->base, iv1->mode);
2263 iv1->extend_mode = comp_mode;
2266 /* Check that both ivs belong to a range of a single mode. If one of the
2267 operands is an invariant, we may need to shorten it into the common
2268 mode. */
2269 if (iv0->mode == iv0->extend_mode
2270 && iv0->step == const0_rtx
2271 && iv0->mode != iv1->mode)
2272 shorten_into_mode (iv0, iv1->mode, cond, signed_p, desc);
2274 if (iv1->mode == iv1->extend_mode
2275 && iv1->step == const0_rtx
2276 && iv0->mode != iv1->mode)
2277 shorten_into_mode (iv1, iv0->mode, swap_condition (cond), signed_p, desc);
2279 if (iv0->mode != iv1->mode)
2280 return false;
2282 desc->mode = iv0->mode;
2283 desc->signed_p = signed_p;
2285 return true;
2288 /* Tries to estimate the maximum number of iterations in LOOP, and return the
2289 result. This function is called from iv_number_of_iterations with
2290 a number of fields in DESC already filled in. OLD_NITER is the original
2291 expression for the number of iterations, before we tried to simplify it. */
2293 static uint64_t
2294 determine_max_iter (struct loop *loop, struct niter_desc *desc, rtx old_niter)
2296 rtx niter = desc->niter_expr;
2297 rtx mmin, mmax, cmp;
2298 uint64_t nmax, inc;
2299 uint64_t andmax = 0;
2301 /* We used to look for constant operand 0 of AND,
2302 but canonicalization should always make this impossible. */
2303 gcc_checking_assert (GET_CODE (niter) != AND
2304 || !CONST_INT_P (XEXP (niter, 0)));
2306 if (GET_CODE (niter) == AND
2307 && CONST_INT_P (XEXP (niter, 1)))
2309 andmax = UINTVAL (XEXP (niter, 1));
2310 niter = XEXP (niter, 0);
2313 get_mode_bounds (desc->mode, desc->signed_p, desc->mode, &mmin, &mmax);
2314 nmax = INTVAL (mmax) - INTVAL (mmin);
2316 if (GET_CODE (niter) == UDIV)
2318 if (!CONST_INT_P (XEXP (niter, 1)))
2319 return nmax;
2320 inc = INTVAL (XEXP (niter, 1));
2321 niter = XEXP (niter, 0);
2323 else
2324 inc = 1;
2326 /* We could use a binary search here, but for now improving the upper
2327 bound by just one eliminates one important corner case. */
2328 cmp = simplify_gen_relational (desc->signed_p ? LT : LTU, VOIDmode,
2329 desc->mode, old_niter, mmax);
2330 simplify_using_initial_values (loop, UNKNOWN, &cmp);
2331 if (cmp == const_true_rtx)
2333 nmax--;
2335 if (dump_file)
2336 fprintf (dump_file, ";; improved upper bound by one.\n");
2338 nmax /= inc;
2339 if (andmax)
2340 nmax = MIN (nmax, andmax);
2341 if (dump_file)
2342 fprintf (dump_file, ";; Determined upper bound %"PRId64".\n",
2343 nmax);
2344 return nmax;
2347 /* Computes number of iterations of the CONDITION in INSN in LOOP and stores
2348 the result into DESC. Very similar to determine_number_of_iterations
2349 (basically its rtl version), complicated by things like subregs. */
2351 static void
2352 iv_number_of_iterations (struct loop *loop, rtx_insn *insn, rtx condition,
2353 struct niter_desc *desc)
2355 rtx op0, op1, delta, step, bound, may_xform, tmp, tmp0, tmp1;
2356 struct rtx_iv iv0, iv1, tmp_iv;
2357 rtx assumption, may_not_xform;
2358 enum rtx_code cond;
2359 machine_mode mode, comp_mode;
2360 rtx mmin, mmax, mode_mmin, mode_mmax;
2361 uint64_t s, size, d, inv, max;
2362 int64_t up, down, inc, step_val;
2363 int was_sharp = false;
2364 rtx old_niter;
2365 bool step_is_pow2;
2367 /* The meaning of these assumptions is this:
2368 if !assumptions
2369 then the rest of the information does not have to be valid
2370 if noloop_assumptions then the loop does not roll
2371 if infinite then this exit is never used */
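/* (All three fields are EXPR_LISTs of conditions; desc->assumptions is
   treated as a conjunction, while desc->noloop_assumptions and
   desc->infinite are treated as disjunctions -- compare the
   simplify_using_initial_values calls with AND and IOR near the end of
   this function.)  */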
2373 desc->assumptions = NULL_RTX;
2374 desc->noloop_assumptions = NULL_RTX;
2375 desc->infinite = NULL_RTX;
2376 desc->simple_p = true;
2378 desc->const_iter = false;
2379 desc->niter_expr = NULL_RTX;
2381 cond = GET_CODE (condition);
2382 gcc_assert (COMPARISON_P (condition));
2384 mode = GET_MODE (XEXP (condition, 0));
2385 if (mode == VOIDmode)
2386 mode = GET_MODE (XEXP (condition, 1));
2387 /* The constant comparisons should be folded. */
2388 gcc_assert (mode != VOIDmode);
2390 /* We only handle integers or pointers. */
2391 if (GET_MODE_CLASS (mode) != MODE_INT
2392 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
2393 goto fail;
2395 op0 = XEXP (condition, 0);
2396 if (!iv_analyze (insn, op0, &iv0))
2397 goto fail;
2398 if (iv0.extend_mode == VOIDmode)
2399 iv0.mode = iv0.extend_mode = mode;
2401 op1 = XEXP (condition, 1);
2402 if (!iv_analyze (insn, op1, &iv1))
2403 goto fail;
2404 if (iv1.extend_mode == VOIDmode)
2405 iv1.mode = iv1.extend_mode = mode;
2407 if (GET_MODE_BITSIZE (iv0.extend_mode) > HOST_BITS_PER_WIDE_INT
2408 || GET_MODE_BITSIZE (iv1.extend_mode) > HOST_BITS_PER_WIDE_INT)
2409 goto fail;
2411 /* Check condition and normalize it. */
2413 switch (cond)
2415 case GE:
2416 case GT:
2417 case GEU:
2418 case GTU:
2419 tmp_iv = iv0; iv0 = iv1; iv1 = tmp_iv;
2420 cond = swap_condition (cond);
2421 break;
2422 case NE:
2423 case LE:
2424 case LEU:
2425 case LT:
2426 case LTU:
2427 break;
2428 default:
2429 goto fail;
2432 /* Handle extends. This is relatively nontrivial, so we only try in some
2433 easy cases, when we can canonicalize the ivs (possibly by adding some
2434 assumptions) to shape subreg (base + i * step). This function also fills
2435 in desc->mode and desc->signed_p. */
2437 if (!canonicalize_iv_subregs (&iv0, &iv1, cond, desc))
2438 goto fail;
2440 comp_mode = iv0.extend_mode;
2441 mode = iv0.mode;
2442 size = GET_MODE_PRECISION (mode);
2443 get_mode_bounds (mode, (cond == LE || cond == LT), comp_mode, &mmin, &mmax);
2444 mode_mmin = lowpart_subreg (mode, mmin, comp_mode);
2445 mode_mmax = lowpart_subreg (mode, mmax, comp_mode);
2447 if (!CONST_INT_P (iv0.step) || !CONST_INT_P (iv1.step))
2448 goto fail;
2450 /* We can take care of the case of two induction variables chasing each other
2451 if the test is NE. I have never seen a loop using it, but still it is
2452 cool. */
2453 if (iv0.step != const0_rtx && iv1.step != const0_rtx)
2455 if (cond != NE)
2456 goto fail;
2458 iv0.step = simplify_gen_binary (MINUS, comp_mode, iv0.step, iv1.step);
2459 iv1.step = const0_rtx;
2462 iv0.step = lowpart_subreg (mode, iv0.step, comp_mode);
2463 iv1.step = lowpart_subreg (mode, iv1.step, comp_mode);
2465 /* This is either an infinite loop or one that ends immediately, depending
2466 on the initial values. Unswitching should remove this kind of condition. */
2467 if (iv0.step == const0_rtx && iv1.step == const0_rtx)
2468 goto fail;
2470 if (cond != NE)
2472 if (iv0.step == const0_rtx)
2473 step_val = -INTVAL (iv1.step);
2474 else
2475 step_val = INTVAL (iv0.step);
2477 /* Ignore loops of while (i-- < 10) type. */
2478 if (step_val < 0)
2479 goto fail;
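/* step_val cannot be zero here (the case of both steps being zero has
   already been rejected above) and negative values were just rejected,
   so clearing the lowest set bit with step_val & (step_val - 1) below is
   the usual power-of-two test.  */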
2481 step_is_pow2 = !(step_val & (step_val - 1));
2483 else
2485 /* We do not care about whether the step is power of two in this
2486 case. */
2487 step_is_pow2 = false;
2488 step_val = 0;
2491 /* Some more condition normalization. We must record some assumptions
2492 due to overflows. */
2493 switch (cond)
2495 case LT:
2496 case LTU:
2497 /* We want to take care only of non-sharp relationals; this is easy,
2498 because in the cases where the overflow would make the transformation
2499 unsafe the loop does not roll. Seemingly it would make more sense to
2500 handle sharp relationals instead, as NE is more similar to them, but
2501 the problem is that here the transformation would be more difficult
2502 due to possibly infinite loops. */
2503 if (iv0.step == const0_rtx)
2505 tmp = lowpart_subreg (mode, iv0.base, comp_mode);
2506 assumption = simplify_gen_relational (EQ, SImode, mode, tmp,
2507 mode_mmax);
2508 if (assumption == const_true_rtx)
2509 goto zero_iter_simplify;
2510 iv0.base = simplify_gen_binary (PLUS, comp_mode,
2511 iv0.base, const1_rtx);
2513 else
2515 tmp = lowpart_subreg (mode, iv1.base, comp_mode);
2516 assumption = simplify_gen_relational (EQ, SImode, mode, tmp,
2517 mode_mmin);
2518 if (assumption == const_true_rtx)
2519 goto zero_iter_simplify;
2520 iv1.base = simplify_gen_binary (PLUS, comp_mode,
2521 iv1.base, constm1_rtx);
2524 if (assumption != const0_rtx)
2525 desc->noloop_assumptions =
2526 alloc_EXPR_LIST (0, assumption, desc->noloop_assumptions);
2527 cond = (cond == LT) ? LE : LEU;
2529 /* It will be useful to be able to tell the difference once more in
2530 the LE -> NE reduction. */
2531 was_sharp = true;
2532 break;
2533 default: ;
2536 /* Take care of trivially infinite loops. */
2537 if (cond != NE)
2539 if (iv0.step == const0_rtx)
2541 tmp = lowpart_subreg (mode, iv0.base, comp_mode);
2542 if (rtx_equal_p (tmp, mode_mmin))
2544 desc->infinite =
2545 alloc_EXPR_LIST (0, const_true_rtx, NULL_RTX);
2546 /* Fill in the remaining fields somehow. */
2547 goto zero_iter_simplify;
2550 else
2552 tmp = lowpart_subreg (mode, iv1.base, comp_mode);
2553 if (rtx_equal_p (tmp, mode_mmax))
2555 desc->infinite =
2556 alloc_EXPR_LIST (0, const_true_rtx, NULL_RTX);
2557 /* Fill in the remaining fields somehow. */
2558 goto zero_iter_simplify;
2563 /* If we can, we want to handle NE conditions instead of size
2564 comparisons, as they are much more friendly (most importantly
2565 this takes care of the special handling of loops with step 1). We
2566 can do it if we first check that the upper bound is greater than or
2567 equal to the lower bound, that their difference is a constant c
2568 modulo the step, and that there is no overflow. */
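/* A made-up instance of the transformation below: for a condition i <= 7
   with i starting at 0 and stepping by 3, delta = (7 - 0) umod 3 = 1, so
   the bound is adjusted to 7 - 1 + 3 = 9 and the test becomes i != 9;
   the loop still exits after the values 0, 3 and 6, provided that 9 fits
   in the mode, which is what may_xform checks.  */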
2569 if (cond != NE)
2571 if (iv0.step == const0_rtx)
2572 step = simplify_gen_unary (NEG, comp_mode, iv1.step, comp_mode);
2573 else
2574 step = iv0.step;
2575 step = lowpart_subreg (mode, step, comp_mode);
2576 delta = simplify_gen_binary (MINUS, comp_mode, iv1.base, iv0.base);
2577 delta = lowpart_subreg (mode, delta, comp_mode);
2578 delta = simplify_gen_binary (UMOD, mode, delta, step);
2579 may_xform = const0_rtx;
2580 may_not_xform = const_true_rtx;
2582 if (CONST_INT_P (delta))
2584 if (was_sharp && INTVAL (delta) == INTVAL (step) - 1)
2586 /* A special case. We have transformed condition of type
2587 for (i = 0; i < 4; i += 4)
2588 into
2589 for (i = 0; i <= 3; i += 4)
2590 obviously if the test for overflow during that transformation
2591 passed, we cannot overflow here. Most importantly, any
2592 loop with a sharp end condition and step 1 falls into this
2593 category, so handling this case specially is definitely
2594 worth the trouble. */
2595 may_xform = const_true_rtx;
2597 else if (iv0.step == const0_rtx)
2599 bound = simplify_gen_binary (PLUS, comp_mode, mmin, step);
2600 bound = simplify_gen_binary (MINUS, comp_mode, bound, delta);
2601 bound = lowpart_subreg (mode, bound, comp_mode);
2602 tmp = lowpart_subreg (mode, iv0.base, comp_mode);
2603 may_xform = simplify_gen_relational (cond, SImode, mode,
2604 bound, tmp);
2605 may_not_xform = simplify_gen_relational (reverse_condition (cond),
2606 SImode, mode,
2607 bound, tmp);
2609 else
2611 bound = simplify_gen_binary (MINUS, comp_mode, mmax, step);
2612 bound = simplify_gen_binary (PLUS, comp_mode, bound, delta);
2613 bound = lowpart_subreg (mode, bound, comp_mode);
2614 tmp = lowpart_subreg (mode, iv1.base, comp_mode);
2615 may_xform = simplify_gen_relational (cond, SImode, mode,
2616 tmp, bound);
2617 may_not_xform = simplify_gen_relational (reverse_condition (cond),
2618 SImode, mode,
2619 tmp, bound);
2623 if (may_xform != const0_rtx)
2625 /* We always perform the transformation, provided that it is not
2626 completely senseless. This is OK, as we would need this assumption
2627 to determine the number of iterations anyway. */
2628 if (may_xform != const_true_rtx)
2630 /* If the step is a power of two and the final value we have
2631 computed overflows, the cycle is infinite. Otherwise it
2632 is nontrivial to compute the number of iterations. */
2633 if (step_is_pow2)
2634 desc->infinite = alloc_EXPR_LIST (0, may_not_xform,
2635 desc->infinite);
2636 else
2637 desc->assumptions = alloc_EXPR_LIST (0, may_xform,
2638 desc->assumptions);
2641 /* We are going to lose some information about the upper bound on
2642 the number of iterations in this step, so record the information
2643 here. */
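/* An invented example of the bound recorded below: for an iv running
   0, 2, 4, ... compared against the constant 9, inc = 2, up = 9,
   down = 0 and max = (9 - 0) / 2 + 1 = 5, which indeed bounds the five
   values 0, 2, 4, 6, 8 for which the exit test can still succeed.  */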
2644 inc = INTVAL (iv0.step) - INTVAL (iv1.step);
2645 if (CONST_INT_P (iv1.base))
2646 up = INTVAL (iv1.base);
2647 else
2648 up = INTVAL (mode_mmax) - inc;
2649 down = INTVAL (CONST_INT_P (iv0.base)
2650 ? iv0.base
2651 : mode_mmin);
2652 max = (up - down) / inc + 1;
2653 if (!desc->infinite
2654 && !desc->assumptions)
2655 record_niter_bound (loop, max, false, true);
2657 if (iv0.step == const0_rtx)
2659 iv0.base = simplify_gen_binary (PLUS, comp_mode, iv0.base, delta);
2660 iv0.base = simplify_gen_binary (MINUS, comp_mode, iv0.base, step);
2662 else
2664 iv1.base = simplify_gen_binary (MINUS, comp_mode, iv1.base, delta);
2665 iv1.base = simplify_gen_binary (PLUS, comp_mode, iv1.base, step);
2668 tmp0 = lowpart_subreg (mode, iv0.base, comp_mode);
2669 tmp1 = lowpart_subreg (mode, iv1.base, comp_mode);
2670 assumption = simplify_gen_relational (reverse_condition (cond),
2671 SImode, mode, tmp0, tmp1);
2672 if (assumption == const_true_rtx)
2673 goto zero_iter_simplify;
2674 else if (assumption != const0_rtx)
2675 desc->noloop_assumptions =
2676 alloc_EXPR_LIST (0, assumption, desc->noloop_assumptions);
2677 cond = NE;
2681 /* Count the number of iterations. */
2682 if (cond == NE)
2684 /* Everything we do here is just arithmetic modulo the size of the mode.
2685 This enables us to do more involved computations of the number of
2686 iterations than in other cases. First transform the condition into
2687 the shape s * i <> c, with s positive. */
2688 iv1.base = simplify_gen_binary (MINUS, comp_mode, iv1.base, iv0.base);
2689 iv0.base = const0_rtx;
2690 iv0.step = simplify_gen_binary (MINUS, comp_mode, iv0.step, iv1.step);
2691 iv1.step = const0_rtx;
2692 if (INTVAL (iv0.step) < 0)
2694 iv0.step = simplify_gen_unary (NEG, comp_mode, iv0.step, comp_mode);
2695 iv1.base = simplify_gen_unary (NEG, comp_mode, iv1.base, comp_mode);
2697 iv0.step = lowpart_subreg (mode, iv0.step, comp_mode);
2699 /* Let gcd (s, size of mode) = d. If d does not divide c, the loop
2700 is infinite. Otherwise, the number of iterations is
2701 (inverse(s/d) * (c/d)) mod (size of mode/d). */
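/* A worked (made-up) instance of the formula above for an 8-bit mode:
   with s = 6 and c = 4, d = gcd (6, 256) = 2, s/d = 3, c/d = 2 and
   inverse (3) mod 128 = 43, so the number of iterations is
   (43 * 2) mod 128 = 86; indeed 6 * 86 = 516 == 4 (mod 256).  A c not
   divisible by d (say c = 3) would make the loop infinite, which is the
   assumption recorded below.  */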
2702 s = INTVAL (iv0.step); d = 1;
2703 while (s % 2 != 1)
2705 s /= 2;
2706 d *= 2;
2707 size--;
2709 bound = GEN_INT (((uint64_t) 1 << (size - 1) << 1) - 1);
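/* (The double shift above computes 2^size - 1 without ever shifting a
   64-bit value by a full 64 bits, which would be undefined behaviour
   when no factor of two was removed from the step.)  */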
2711 tmp1 = lowpart_subreg (mode, iv1.base, comp_mode);
2712 tmp = simplify_gen_binary (UMOD, mode, tmp1, gen_int_mode (d, mode));
2713 assumption = simplify_gen_relational (NE, SImode, mode, tmp, const0_rtx);
2714 desc->infinite = alloc_EXPR_LIST (0, assumption, desc->infinite);
2716 tmp = simplify_gen_binary (UDIV, mode, tmp1, gen_int_mode (d, mode));
2717 inv = inverse (s, size);
2718 tmp = simplify_gen_binary (MULT, mode, tmp, gen_int_mode (inv, mode));
2719 desc->niter_expr = simplify_gen_binary (AND, mode, tmp, bound);
2721 else
2723 if (iv1.step == const0_rtx)
2724 /* Condition in the shape a + s * i <= b.
2725 We must know that b + s does not overflow and that a <= b + s; then we
2726 can compute the number of iterations as (b + s - a) / s. (It might
2727 seem that we could in fact be more clever about testing the b + s
2728 overflow condition using some information about b - a mod s,
2729 but that was already taken into account during the LE -> NE transform.) */
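/* For illustration only (numbers invented): with a = 3, b = 10 and s = 4
   in an unsigned 8-bit mode, b + s = 14 does not overflow and a <= 14,
   so the number of iterations is (10 + 4 - 3) / 4 = 2, matching the two
   values 3 and 7 that still satisfy the test before 11 exceeds b.  */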
2731 step = iv0.step;
2732 tmp0 = lowpart_subreg (mode, iv0.base, comp_mode);
2733 tmp1 = lowpart_subreg (mode, iv1.base, comp_mode);
2735 bound = simplify_gen_binary (MINUS, mode, mode_mmax,
2736 lowpart_subreg (mode, step,
2737 comp_mode));
2738 if (step_is_pow2)
2740 rtx t0, t1;
2742 /* If s is a power of 2, we know that the loop is infinite if
2743 a % s <= b % s and b + s overflows. */
2744 assumption = simplify_gen_relational (reverse_condition (cond),
2745 SImode, mode,
2746 tmp1, bound);
2748 t0 = simplify_gen_binary (UMOD, mode, copy_rtx (tmp0), step);
2749 t1 = simplify_gen_binary (UMOD, mode, copy_rtx (tmp1), step);
2750 tmp = simplify_gen_relational (cond, SImode, mode, t0, t1);
2751 assumption = simplify_gen_binary (AND, SImode, assumption, tmp);
2752 desc->infinite =
2753 alloc_EXPR_LIST (0, assumption, desc->infinite);
2755 else
2757 assumption = simplify_gen_relational (cond, SImode, mode,
2758 tmp1, bound);
2759 desc->assumptions =
2760 alloc_EXPR_LIST (0, assumption, desc->assumptions);
2763 tmp = simplify_gen_binary (PLUS, comp_mode, iv1.base, iv0.step);
2764 tmp = lowpart_subreg (mode, tmp, comp_mode);
2765 assumption = simplify_gen_relational (reverse_condition (cond),
2766 SImode, mode, tmp0, tmp);
2768 delta = simplify_gen_binary (PLUS, mode, tmp1, step);
2769 delta = simplify_gen_binary (MINUS, mode, delta, tmp0);
2771 else
2773 /* Condition in the shape a <= b - s * i.
2774 We must know that a - s does not overflow and that a - s <= b; then
2775 we can again compute the number of iterations as (b - (a - s)) / s. */
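/* Again for illustration only (numbers invented): with a = 7, b = 10 and
   s = 4 in an unsigned 8-bit mode, a - s = 3 does not overflow and
   3 <= 10, so the number of iterations is (10 - (7 - 4)) / 4 = 1,
   matching the single value 10 of the decreasing iv that is still >= a
   before it drops to 6.  */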
2776 step = simplify_gen_unary (NEG, mode, iv1.step, mode);
2777 tmp0 = lowpart_subreg (mode, iv0.base, comp_mode);
2778 tmp1 = lowpart_subreg (mode, iv1.base, comp_mode);
2780 bound = simplify_gen_binary (PLUS, mode, mode_mmin,
2781 lowpart_subreg (mode, step, comp_mode));
2782 if (step_is_pow2)
2784 rtx t0, t1;
2786 /* If s is a power of 2, we know that the loop is infinite if
2787 a % s <= b % s and a - s overflows. */
2788 assumption = simplify_gen_relational (reverse_condition (cond),
2789 SImode, mode,
2790 bound, tmp0);
2792 t0 = simplify_gen_binary (UMOD, mode, copy_rtx (tmp0), step);
2793 t1 = simplify_gen_binary (UMOD, mode, copy_rtx (tmp1), step);
2794 tmp = simplify_gen_relational (cond, SImode, mode, t0, t1);
2795 assumption = simplify_gen_binary (AND, SImode, assumption, tmp);
2796 desc->infinite =
2797 alloc_EXPR_LIST (0, assumption, desc->infinite);
2799 else
2801 assumption = simplify_gen_relational (cond, SImode, mode,
2802 bound, tmp0);
2803 desc->assumptions =
2804 alloc_EXPR_LIST (0, assumption, desc->assumptions);
2807 tmp = simplify_gen_binary (PLUS, comp_mode, iv0.base, iv1.step);
2808 tmp = lowpart_subreg (mode, tmp, comp_mode);
2809 assumption = simplify_gen_relational (reverse_condition (cond),
2810 SImode, mode,
2811 tmp, tmp1);
2812 delta = simplify_gen_binary (MINUS, mode, tmp0, step);
2813 delta = simplify_gen_binary (MINUS, mode, tmp1, delta);
2815 if (assumption == const_true_rtx)
2816 goto zero_iter_simplify;
2817 else if (assumption != const0_rtx)
2818 desc->noloop_assumptions =
2819 alloc_EXPR_LIST (0, assumption, desc->noloop_assumptions);
2820 delta = simplify_gen_binary (UDIV, mode, delta, step);
2821 desc->niter_expr = delta;
2824 old_niter = desc->niter_expr;
2826 simplify_using_initial_values (loop, AND, &desc->assumptions);
2827 if (desc->assumptions
2828 && XEXP (desc->assumptions, 0) == const0_rtx)
2829 goto fail;
2830 simplify_using_initial_values (loop, IOR, &desc->noloop_assumptions);
2831 simplify_using_initial_values (loop, IOR, &desc->infinite);
2832 simplify_using_initial_values (loop, UNKNOWN, &desc->niter_expr);
2834 /* Rerun the simplification.  Consider code (created by copying loop headers)
2836      i = 0;
2838      if (0 < n)
2839        {
2840          do
2841            {
2842              i++;
2843            } while (i < n);
2844        }
2846 The first pass determines that i = 0, the second pass uses it to eliminate
2847 the noloop assumption. */
2849 simplify_using_initial_values (loop, AND, &desc->assumptions);
2850 if (desc->assumptions
2851 && XEXP (desc->assumptions, 0) == const0_rtx)
2852 goto fail;
2853 simplify_using_initial_values (loop, IOR, &desc->noloop_assumptions);
2854 simplify_using_initial_values (loop, IOR, &desc->infinite);
2855 simplify_using_initial_values (loop, UNKNOWN, &desc->niter_expr);
2857 if (desc->noloop_assumptions
2858 && XEXP (desc->noloop_assumptions, 0) == const_true_rtx)
2859 goto zero_iter;
2861 if (CONST_INT_P (desc->niter_expr))
2863 uint64_t val = INTVAL (desc->niter_expr);
2865 desc->const_iter = true;
2866 desc->niter = val & GET_MODE_MASK (desc->mode);
2867 if (!desc->infinite
2868 && !desc->assumptions)
2869 record_niter_bound (loop, desc->niter, false, true);
2871 else
2873 max = determine_max_iter (loop, desc, old_niter);
2874 if (!max)
2875 goto zero_iter_simplify;
2876 if (!desc->infinite
2877 && !desc->assumptions)
2878 record_niter_bound (loop, max, false, true);
2880 /* simplify_using_initial_values does a copy propagation on the registers
2881 in the expression for the number of iterations. This lengthens the live
2882 ranges of registers and increases register pressure, and usually
2883 brings no gain (and if it happens to, the cse pass will take care
2884 of it anyway). So prevent this behavior, unless it enabled us to
2885 derive that the number of iterations is a constant. */
2886 desc->niter_expr = old_niter;
2889 return;
2891 zero_iter_simplify:
2892 /* Simplify the assumptions. */
2893 simplify_using_initial_values (loop, AND, &desc->assumptions);
2894 if (desc->assumptions
2895 && XEXP (desc->assumptions, 0) == const0_rtx)
2896 goto fail;
2897 simplify_using_initial_values (loop, IOR, &desc->infinite);
2899 /* Fallthru. */
2900 zero_iter:
2901 desc->const_iter = true;
2902 desc->niter = 0;
2903 record_niter_bound (loop, 0, true, true);
2904 desc->noloop_assumptions = NULL_RTX;
2905 desc->niter_expr = const0_rtx;
2906 return;
2908 fail:
2909 desc->simple_p = false;
2910 return;
2913 /* Checks whether E is a simple exit from LOOP and stores its description
2914 into DESC. */
2916 static void
2917 check_simple_exit (struct loop *loop, edge e, struct niter_desc *desc)
2919 basic_block exit_bb;
2920 rtx condition;
2921 rtx_insn *at;
2922 edge ein;
2924 exit_bb = e->src;
2925 desc->simple_p = false;
2927 /* It must belong directly to the loop. */
2928 if (exit_bb->loop_father != loop)
2929 return;
2931 /* It must be tested (at least) once during any iteration. */
2932 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit_bb))
2933 return;
2935 /* It must end in a simple conditional jump. */
2936 if (!any_condjump_p (BB_END (exit_bb)))
2937 return;
2939 ein = EDGE_SUCC (exit_bb, 0);
2940 if (ein == e)
2941 ein = EDGE_SUCC (exit_bb, 1);
2943 desc->out_edge = e;
2944 desc->in_edge = ein;
2946 /* Test whether the condition is suitable. */
2947 if (!(condition = get_condition (BB_END (ein->src), &at, false, false)))
2948 return;
2950 if (ein->flags & EDGE_FALLTHRU)
2952 condition = reversed_condition (condition);
2953 if (!condition)
2954 return;
2957 /* Check that we are able to determine number of iterations and fill
2958 in information about it. */
2959 iv_number_of_iterations (loop, at, condition, desc);
2962 /* Finds a simple exit of LOOP and stores its description into DESC. */
2964 void
2965 find_simple_exit (struct loop *loop, struct niter_desc *desc)
2967 unsigned i;
2968 basic_block *body;
2969 edge e;
2970 struct niter_desc act;
2971 bool any = false;
2972 edge_iterator ei;
2974 desc->simple_p = false;
2975 body = get_loop_body (loop);
2977 for (i = 0; i < loop->num_nodes; i++)
2979 FOR_EACH_EDGE (e, ei, body[i]->succs)
2981 if (flow_bb_inside_loop_p (loop, e->dest))
2982 continue;
2984 check_simple_exit (loop, e, &act);
2985 if (!act.simple_p)
2986 continue;
2988 if (!any)
2989 any = true;
2990 else
2992 /* Prefer constant iterations; the fewer the better. */
2993 if (!act.const_iter
2994 || (desc->const_iter && act.niter >= desc->niter))
2995 continue;
2997 /* Also, if the current exit may be infinite while the old one
2998 may not, prefer the old one. */
2999 if (act.infinite && !desc->infinite)
3000 continue;
3003 *desc = act;
3007 if (dump_file)
3009 if (desc->simple_p)
3011 fprintf (dump_file, "Loop %d is simple:\n", loop->num);
3012 fprintf (dump_file, " simple exit %d -> %d\n",
3013 desc->out_edge->src->index,
3014 desc->out_edge->dest->index);
3015 if (desc->assumptions)
3017 fprintf (dump_file, " assumptions: ");
3018 print_rtl (dump_file, desc->assumptions);
3019 fprintf (dump_file, "\n");
3021 if (desc->noloop_assumptions)
3023 fprintf (dump_file, " does not roll if: ");
3024 print_rtl (dump_file, desc->noloop_assumptions);
3025 fprintf (dump_file, "\n");
3027 if (desc->infinite)
3029 fprintf (dump_file, " infinite if: ");
3030 print_rtl (dump_file, desc->infinite);
3031 fprintf (dump_file, "\n");
3034 fprintf (dump_file, " number of iterations: ");
3035 print_rtl (dump_file, desc->niter_expr);
3036 fprintf (dump_file, "\n");
3038 fprintf (dump_file, " upper bound: %li\n",
3039 (long)get_max_loop_iterations_int (loop));
3040 fprintf (dump_file, " realistic bound: %li\n",
3041 (long)get_estimated_loop_iterations_int (loop));
3043 else
3044 fprintf (dump_file, "Loop %d is not simple.\n", loop->num);
3047 free (body);
3050 /* Creates a simple loop description of LOOP if it was not computed
3051 already. */
3053 struct niter_desc *
3054 get_simple_loop_desc (struct loop *loop)
3056 struct niter_desc *desc = simple_loop_desc (loop);
3058 if (desc)
3059 return desc;
3061 /* At least desc->infinite is not always initialized by
3062 find_simple_exit. */
3063 desc = ggc_cleared_alloc<niter_desc> ();
3064 iv_analysis_loop_init (loop);
3065 find_simple_exit (loop, desc);
3066 loop->simple_loop_desc = desc;
3068 if (desc->simple_p && (desc->assumptions || desc->infinite))
3070 const char *wording;
3072 /* Assume that no overflow happens and that the loop is finite.
3073 We already warned at the tree level if we ran optimizations there. */
3074 if (!flag_tree_loop_optimize && warn_unsafe_loop_optimizations)
3076 if (desc->infinite)
3078 wording =
3079 flag_unsafe_loop_optimizations
3080 ? N_("assuming that the loop is not infinite")
3081 : N_("cannot optimize possibly infinite loops");
3082 warning (OPT_Wunsafe_loop_optimizations, "%s",
3083 gettext (wording));
3085 if (desc->assumptions)
3087 wording =
3088 flag_unsafe_loop_optimizations
3089 ? N_("assuming that the loop counter does not overflow")
3090 : N_("cannot optimize loop, the loop counter may overflow");
3091 warning (OPT_Wunsafe_loop_optimizations, "%s",
3092 gettext (wording));
3096 if (flag_unsafe_loop_optimizations)
3098 desc->assumptions = NULL_RTX;
3099 desc->infinite = NULL_RTX;
3103 return desc;
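/* A minimal sketch (hypothetical, not part of this file) of how a pass
   might consume the descriptor returned above; can_unroll_p and the
   constant 2 are invented for illustration:

     static bool
     can_unroll_p (struct loop *loop)
     {
       struct niter_desc *desc = get_simple_loop_desc (loop);
       return desc->simple_p && desc->const_iter && desc->niter >= 2;
     }

   Callers that are done with the information release it with
   free_simple_loop_desc below.  */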
3106 /* Releases simple loop description for LOOP. */
3108 void
3109 free_simple_loop_desc (struct loop *loop)
3111 struct niter_desc *desc = simple_loop_desc (loop);
3113 if (!desc)
3114 return;
3116 ggc_free (desc);
3117 loop->simple_loop_desc = NULL;