[official-gcc.git] / gcc / loop-iv.c
1 /* Rtl-level induction variable analysis.
2 Copyright (C) 2004-2015 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published by the
8 Free Software Foundation; either version 3, or (at your option) any
9 later version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT
12 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* This is a simple analysis of induction variables of the loop. The major use
21 is for determining the number of iterations of a loop for loop unrolling,
22 doloop optimization and branch prediction. The iv information is computed
23 on demand.
25 Induction variables are analyzed by walking the use-def chains. When
26 a basic induction variable (biv) is found, it is cached in the bivs
27 hash table. When a register is proved to be a biv, its description
28 is stored to DF_REF_DATA of the def reference.
30 The analysis always works with one loop -- you must call
31 iv_analysis_loop_init (loop) for it. All the other functions then work with
32 this loop. When you need to work with another loop, just call
33 iv_analysis_loop_init for it. When you no longer need iv analysis, call
34 iv_analysis_done () to clean up the memory.
36 The available functions are:
38 iv_analyze (insn, reg, iv): Stores the description of the induction variable
39 corresponding to the use of register REG in INSN to IV. Returns true if
40 REG is an induction variable in INSN, and false otherwise.
41 If the use of REG is not found in INSN, the following insns are scanned (so that
42 we may call this function on insn returned by get_condition).
43 iv_analyze_result (insn, def, iv): Stores to IV the description of the iv
44 corresponding to DEF, which is a register defined in INSN.
45 iv_analyze_expr (insn, rhs, mode, iv): Stores to IV the description of iv
46 corresponding to expression EXPR evaluated at INSN. All registers used by
47 EXPR must also be used in INSN. */
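
/* Editor's note: the following usage sketch is not part of the original
   file; it only illustrates the interface described above, with LOOP, INSN
   and REG assumed to be supplied by the caller.

     struct rtx_iv iv;

     iv_analysis_loop_init (loop);
     if (iv_analyze (insn, reg, &iv))
       {
         ... inspect iv.base, iv.step and iv.mode ...
       }
     iv_analysis_done ();  */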
50 #include "config.h"
51 #include "system.h"
52 #include "coretypes.h"
53 #include "tm.h"
54 #include "rtl.h"
55 #include "hard-reg-set.h"
56 #include "obstack.h"
57 #include "predict.h"
58 #include "vec.h"
59 #include "hashtab.h"
60 #include "hash-set.h"
61 #include "machmode.h"
62 #include "input.h"
63 #include "function.h"
64 #include "dominance.h"
65 #include "cfg.h"
66 #include "basic-block.h"
67 #include "cfgloop.h"
68 #include "symtab.h"
69 #include "expr.h"
70 #include "intl.h"
71 #include "diagnostic-core.h"
72 #include "df.h"
73 #include "hash-table.h"
74 #include "dumpfile.h"
75 #include "rtl-iter.h"
77 /* Possible return values of iv_get_reaching_def. */
79 enum iv_grd_result
81 /* More than one reaching def, or reaching def that does not
82 dominate the use. */
83 GRD_INVALID,
85 /* The use is a trivial invariant of the loop, i.e. it is not changed
86 inside the loop. */
87 GRD_INVARIANT,
89 /* The use is reached by initial value and a value from the
90 previous iteration. */
91 GRD_MAYBE_BIV,
93 /* The use has single dominating def. */
94 GRD_SINGLE_DOM
97 /* Information about a biv. */
99 struct biv_entry
101 unsigned regno; /* The register of the biv. */
102 struct rtx_iv iv; /* Value of the biv. */
105 static bool clean_slate = true;
107 static unsigned int iv_ref_table_size = 0;
109 /* Table of rtx_ivs indexed by the df_ref uid field. */
110 static struct rtx_iv ** iv_ref_table;
112 /* Induction variable stored at the reference. */
113 #define DF_REF_IV(REF) iv_ref_table[DF_REF_ID (REF)]
114 #define DF_REF_IV_SET(REF, IV) iv_ref_table[DF_REF_ID (REF)] = (IV)
116 /* The current loop. */
118 static struct loop *current_loop;
120 /* Hashtable helper. */
122 struct biv_entry_hasher : typed_free_remove <biv_entry>
124 typedef biv_entry value_type;
125 typedef rtx_def compare_type;
126 static inline hashval_t hash (const value_type *);
127 static inline bool equal (const value_type *, const compare_type *);
130 /* Returns hash value for biv B. */
132 inline hashval_t
133 biv_entry_hasher::hash (const value_type *b)
135 return b->regno;
138 /* Compares biv B and register R. */
140 inline bool
141 biv_entry_hasher::equal (const value_type *b, const compare_type *r)
143 return b->regno == REGNO (r);
146 /* Bivs of the current loop. */
148 static hash_table<biv_entry_hasher> *bivs;
150 static bool iv_analyze_op (rtx_insn *, rtx, struct rtx_iv *);
152 /* Return the RTX code corresponding to the IV extend code EXTEND. */
153 static inline enum rtx_code
154 iv_extend_to_rtx_code (enum iv_extend_code extend)
156 switch (extend)
158 case IV_SIGN_EXTEND:
159 return SIGN_EXTEND;
160 case IV_ZERO_EXTEND:
161 return ZERO_EXTEND;
162 case IV_UNKNOWN_EXTEND:
163 return UNKNOWN;
165 gcc_unreachable ();
168 /* Dumps information about IV to FILE. */
170 extern void dump_iv_info (FILE *, struct rtx_iv *);
171 void
172 dump_iv_info (FILE *file, struct rtx_iv *iv)
174 if (!iv->base)
176 fprintf (file, "not simple");
177 return;
180 if (iv->step == const0_rtx
181 && !iv->first_special)
182 fprintf (file, "invariant ");
184 print_rtl (file, iv->base);
185 if (iv->step != const0_rtx)
187 fprintf (file, " + ");
188 print_rtl (file, iv->step);
189 fprintf (file, " * iteration");
191 fprintf (file, " (in %s)", GET_MODE_NAME (iv->mode));
193 if (iv->mode != iv->extend_mode)
194 fprintf (file, " %s to %s",
195 rtx_name[iv_extend_to_rtx_code (iv->extend)],
196 GET_MODE_NAME (iv->extend_mode));
198 if (iv->mult != const1_rtx)
200 fprintf (file, " * ");
201 print_rtl (file, iv->mult);
203 if (iv->delta != const0_rtx)
205 fprintf (file, " + ");
206 print_rtl (file, iv->delta);
208 if (iv->first_special)
209 fprintf (file, " (first special)");
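
/* Editor's illustration (not from the original file): for a biv with base
   (reg:SI 100) and step (const_int 1), the output produced above would look
   roughly like

     (reg:SI 100) + (const_int 1) * iteration (in SImode)  */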
212 /* Generates a subreg to get the least significant part of EXPR (in mode
213 INNER_MODE) to OUTER_MODE. */
216 lowpart_subreg (machine_mode outer_mode, rtx expr,
217 machine_mode inner_mode)
219 return simplify_gen_subreg (outer_mode, expr, inner_mode,
220 subreg_lowpart_offset (outer_mode, inner_mode));
223 static void
224 check_iv_ref_table_size (void)
226 if (iv_ref_table_size < DF_DEFS_TABLE_SIZE ())
228 unsigned int new_size = DF_DEFS_TABLE_SIZE () + (DF_DEFS_TABLE_SIZE () / 4);
229 iv_ref_table = XRESIZEVEC (struct rtx_iv *, iv_ref_table, new_size);
230 memset (&iv_ref_table[iv_ref_table_size], 0,
231 (new_size - iv_ref_table_size) * sizeof (struct rtx_iv *));
232 iv_ref_table_size = new_size;
237 /* Checks whether REG is a well-behaved register. */
239 static bool
240 simple_reg_p (rtx reg)
242 unsigned r;
244 if (GET_CODE (reg) == SUBREG)
246 if (!subreg_lowpart_p (reg))
247 return false;
248 reg = SUBREG_REG (reg);
251 if (!REG_P (reg))
252 return false;
254 r = REGNO (reg);
255 if (HARD_REGISTER_NUM_P (r))
256 return false;
258 if (GET_MODE_CLASS (GET_MODE (reg)) != MODE_INT)
259 return false;
261 return true;
264 /* Clears the information about ivs stored in df. */
266 static void
267 clear_iv_info (void)
269 unsigned i, n_defs = DF_DEFS_TABLE_SIZE ();
270 struct rtx_iv *iv;
272 check_iv_ref_table_size ();
273 for (i = 0; i < n_defs; i++)
275 iv = iv_ref_table[i];
276 if (iv)
278 free (iv);
279 iv_ref_table[i] = NULL;
283 bivs->empty ();
287 /* Prepare the data for an induction variable analysis of a LOOP. */
289 void
290 iv_analysis_loop_init (struct loop *loop)
292 current_loop = loop;
294 /* Clear the information from the analysis of the previous loop. */
295 if (clean_slate)
297 df_set_flags (DF_EQ_NOTES + DF_DEFER_INSN_RESCAN);
298 bivs = new hash_table<biv_entry_hasher> (10);
299 clean_slate = false;
301 else
302 clear_iv_info ();
304 /* Get rid of the ud chains before processing the rescans. Then add
305 the problem back. */
306 df_remove_problem (df_chain);
307 df_process_deferred_rescans ();
308 df_set_flags (DF_RD_PRUNE_DEAD_DEFS);
309 df_chain_add_problem (DF_UD_CHAIN);
310 df_note_add_problem ();
311 df_analyze_loop (loop);
312 if (dump_file)
313 df_dump_region (dump_file);
315 check_iv_ref_table_size ();
318 /* Finds the definition of REG that dominates loop latch and stores
319 it to DEF. Returns false if there is not a single definition
320 dominating the latch. If REG has no definition in loop, DEF
321 is set to NULL and true is returned. */
323 static bool
324 latch_dominating_def (rtx reg, df_ref *def)
326 df_ref single_rd = NULL, adef;
327 unsigned regno = REGNO (reg);
328 struct df_rd_bb_info *bb_info = DF_RD_BB_INFO (current_loop->latch);
330 for (adef = DF_REG_DEF_CHAIN (regno); adef; adef = DF_REF_NEXT_REG (adef))
332 if (!bitmap_bit_p (df->blocks_to_analyze, DF_REF_BBNO (adef))
333 || !bitmap_bit_p (&bb_info->out, DF_REF_ID (adef)))
334 continue;
336 /* More than one reaching definition. */
337 if (single_rd)
338 return false;
340 if (!just_once_each_iteration_p (current_loop, DF_REF_BB (adef)))
341 return false;
343 single_rd = adef;
346 *def = single_rd;
347 return true;
350 /* Gets definition of REG reaching its use in INSN and stores it to DEF. */
352 static enum iv_grd_result
353 iv_get_reaching_def (rtx_insn *insn, rtx reg, df_ref *def)
355 df_ref use, adef;
356 basic_block def_bb, use_bb;
357 rtx_insn *def_insn;
358 bool dom_p;
360 *def = NULL;
361 if (!simple_reg_p (reg))
362 return GRD_INVALID;
363 if (GET_CODE (reg) == SUBREG)
364 reg = SUBREG_REG (reg);
365 gcc_assert (REG_P (reg));
367 use = df_find_use (insn, reg);
368 gcc_assert (use != NULL);
370 if (!DF_REF_CHAIN (use))
371 return GRD_INVARIANT;
373 /* More than one reaching def. */
374 if (DF_REF_CHAIN (use)->next)
375 return GRD_INVALID;
377 adef = DF_REF_CHAIN (use)->ref;
379 /* We do not handle setting only part of the register. */
380 if (DF_REF_FLAGS (adef) & DF_REF_READ_WRITE)
381 return GRD_INVALID;
383 def_insn = DF_REF_INSN (adef);
384 def_bb = DF_REF_BB (adef);
385 use_bb = BLOCK_FOR_INSN (insn);
387 if (use_bb == def_bb)
388 dom_p = (DF_INSN_LUID (def_insn) < DF_INSN_LUID (insn));
389 else
390 dom_p = dominated_by_p (CDI_DOMINATORS, use_bb, def_bb);
392 if (dom_p)
394 *def = adef;
395 return GRD_SINGLE_DOM;
398 /* The definition does not dominate the use. This is still OK if
399 this may be a use of a biv, i.e. if the def_bb dominates loop
400 latch. */
401 if (just_once_each_iteration_p (current_loop, def_bb))
402 return GRD_MAYBE_BIV;
404 return GRD_INVALID;
407 /* Sets IV to invariant CST in MODE. Always returns true (just for
408 consistency with other iv manipulation functions that may fail). */
410 static bool
411 iv_constant (struct rtx_iv *iv, rtx cst, machine_mode mode)
413 if (mode == VOIDmode)
414 mode = GET_MODE (cst);
416 iv->mode = mode;
417 iv->base = cst;
418 iv->step = const0_rtx;
419 iv->first_special = false;
420 iv->extend = IV_UNKNOWN_EXTEND;
421 iv->extend_mode = iv->mode;
422 iv->delta = const0_rtx;
423 iv->mult = const1_rtx;
425 return true;
428 /* Evaluates application of subreg to MODE on IV. */
430 static bool
431 iv_subreg (struct rtx_iv *iv, machine_mode mode)
433 /* If iv is invariant, just calculate the new value. */
434 if (iv->step == const0_rtx
435 && !iv->first_special)
437 rtx val = get_iv_value (iv, const0_rtx);
438 val = lowpart_subreg (mode, val,
439 iv->extend == IV_UNKNOWN_EXTEND
440 ? iv->mode : iv->extend_mode);
442 iv->base = val;
443 iv->extend = IV_UNKNOWN_EXTEND;
444 iv->mode = iv->extend_mode = mode;
445 iv->delta = const0_rtx;
446 iv->mult = const1_rtx;
447 return true;
450 if (iv->extend_mode == mode)
451 return true;
453 if (GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (iv->mode))
454 return false;
456 iv->extend = IV_UNKNOWN_EXTEND;
457 iv->mode = mode;
459 iv->base = simplify_gen_binary (PLUS, iv->extend_mode, iv->delta,
460 simplify_gen_binary (MULT, iv->extend_mode,
461 iv->base, iv->mult));
462 iv->step = simplify_gen_binary (MULT, iv->extend_mode, iv->step, iv->mult);
463 iv->mult = const1_rtx;
464 iv->delta = const0_rtx;
465 iv->first_special = false;
467 return true;
470 /* Evaluates application of EXTEND to MODE on IV. */
472 static bool
473 iv_extend (struct rtx_iv *iv, enum iv_extend_code extend, machine_mode mode)
475 /* If iv is invariant, just calculate the new value. */
476 if (iv->step == const0_rtx
477 && !iv->first_special)
479 rtx val = get_iv_value (iv, const0_rtx);
480 if (iv->extend_mode != iv->mode
481 && iv->extend != IV_UNKNOWN_EXTEND
482 && iv->extend != extend)
483 val = lowpart_subreg (iv->mode, val, iv->extend_mode);
484 val = simplify_gen_unary (iv_extend_to_rtx_code (extend), mode,
485 val,
486 iv->extend == extend
487 ? iv->extend_mode : iv->mode);
488 iv->base = val;
489 iv->extend = IV_UNKNOWN_EXTEND;
490 iv->mode = iv->extend_mode = mode;
491 iv->delta = const0_rtx;
492 iv->mult = const1_rtx;
493 return true;
496 if (mode != iv->extend_mode)
497 return false;
499 if (iv->extend != IV_UNKNOWN_EXTEND
500 && iv->extend != extend)
501 return false;
503 iv->extend = extend;
505 return true;
508 /* Evaluates negation of IV. */
510 static bool
511 iv_neg (struct rtx_iv *iv)
513 if (iv->extend == IV_UNKNOWN_EXTEND)
515 iv->base = simplify_gen_unary (NEG, iv->extend_mode,
516 iv->base, iv->extend_mode);
517 iv->step = simplify_gen_unary (NEG, iv->extend_mode,
518 iv->step, iv->extend_mode);
520 else
522 iv->delta = simplify_gen_unary (NEG, iv->extend_mode,
523 iv->delta, iv->extend_mode);
524 iv->mult = simplify_gen_unary (NEG, iv->extend_mode,
525 iv->mult, iv->extend_mode);
528 return true;
531 /* Evaluates addition or subtraction (according to OP) of IV1 to IV0. */
533 static bool
534 iv_add (struct rtx_iv *iv0, struct rtx_iv *iv1, enum rtx_code op)
536 machine_mode mode;
537 rtx arg;
539 /* Extend the constant to extend_mode of the other operand if necessary. */
540 if (iv0->extend == IV_UNKNOWN_EXTEND
541 && iv0->mode == iv0->extend_mode
542 && iv0->step == const0_rtx
543 && GET_MODE_SIZE (iv0->extend_mode) < GET_MODE_SIZE (iv1->extend_mode))
545 iv0->extend_mode = iv1->extend_mode;
546 iv0->base = simplify_gen_unary (ZERO_EXTEND, iv0->extend_mode,
547 iv0->base, iv0->mode);
549 if (iv1->extend == IV_UNKNOWN_EXTEND
550 && iv1->mode == iv1->extend_mode
551 && iv1->step == const0_rtx
552 && GET_MODE_SIZE (iv1->extend_mode) < GET_MODE_SIZE (iv0->extend_mode))
554 iv1->extend_mode = iv0->extend_mode;
555 iv1->base = simplify_gen_unary (ZERO_EXTEND, iv1->extend_mode,
556 iv1->base, iv1->mode);
559 mode = iv0->extend_mode;
560 if (mode != iv1->extend_mode)
561 return false;
563 if (iv0->extend == IV_UNKNOWN_EXTEND
564 && iv1->extend == IV_UNKNOWN_EXTEND)
566 if (iv0->mode != iv1->mode)
567 return false;
569 iv0->base = simplify_gen_binary (op, mode, iv0->base, iv1->base);
570 iv0->step = simplify_gen_binary (op, mode, iv0->step, iv1->step);
572 return true;
575 /* Handle addition of constant. */
576 if (iv1->extend == IV_UNKNOWN_EXTEND
577 && iv1->mode == mode
578 && iv1->step == const0_rtx)
580 iv0->delta = simplify_gen_binary (op, mode, iv0->delta, iv1->base);
581 return true;
584 if (iv0->extend == IV_UNKNOWN_EXTEND
585 && iv0->mode == mode
586 && iv0->step == const0_rtx)
588 arg = iv0->base;
589 *iv0 = *iv1;
590 if (op == MINUS
591 && !iv_neg (iv0))
592 return false;
594 iv0->delta = simplify_gen_binary (PLUS, mode, iv0->delta, arg);
595 return true;
598 return false;
601 /* Evaluates multiplication of IV by constant CST. */
603 static bool
604 iv_mult (struct rtx_iv *iv, rtx mby)
606 machine_mode mode = iv->extend_mode;
608 if (GET_MODE (mby) != VOIDmode
609 && GET_MODE (mby) != mode)
610 return false;
612 if (iv->extend == IV_UNKNOWN_EXTEND)
614 iv->base = simplify_gen_binary (MULT, mode, iv->base, mby);
615 iv->step = simplify_gen_binary (MULT, mode, iv->step, mby);
617 else
619 iv->delta = simplify_gen_binary (MULT, mode, iv->delta, mby);
620 iv->mult = simplify_gen_binary (MULT, mode, iv->mult, mby);
623 return true;
626 /* Evaluates shift of IV by constant CST. */
628 static bool
629 iv_shift (struct rtx_iv *iv, rtx mby)
631 machine_mode mode = iv->extend_mode;
633 if (GET_MODE (mby) != VOIDmode
634 && GET_MODE (mby) != mode)
635 return false;
637 if (iv->extend == IV_UNKNOWN_EXTEND)
639 iv->base = simplify_gen_binary (ASHIFT, mode, iv->base, mby);
640 iv->step = simplify_gen_binary (ASHIFT, mode, iv->step, mby);
642 else
644 iv->delta = simplify_gen_binary (ASHIFT, mode, iv->delta, mby);
645 iv->mult = simplify_gen_binary (ASHIFT, mode, iv->mult, mby);
648 return true;
651 /* The recursive part of get_biv_step. Gets the value of the single value
652 defined by DEF with respect to the initial value of REG inside the loop, in the shape described
653 at get_biv_step. */
655 static bool
656 get_biv_step_1 (df_ref def, rtx reg,
657 rtx *inner_step, machine_mode *inner_mode,
658 enum iv_extend_code *extend, machine_mode outer_mode,
659 rtx *outer_step)
661 rtx set, rhs, op0 = NULL_RTX, op1 = NULL_RTX;
662 rtx next, nextr, tmp;
663 enum rtx_code code;
664 rtx_insn *insn = DF_REF_INSN (def);
665 df_ref next_def;
666 enum iv_grd_result res;
668 set = single_set (insn);
669 if (!set)
670 return false;
672 rhs = find_reg_equal_equiv_note (insn);
673 if (rhs)
674 rhs = XEXP (rhs, 0);
675 else
676 rhs = SET_SRC (set);
678 code = GET_CODE (rhs);
679 switch (code)
681 case SUBREG:
682 case REG:
683 next = rhs;
684 break;
686 case PLUS:
687 case MINUS:
688 op0 = XEXP (rhs, 0);
689 op1 = XEXP (rhs, 1);
691 if (code == PLUS && CONSTANT_P (op0))
693 tmp = op0; op0 = op1; op1 = tmp;
696 if (!simple_reg_p (op0)
697 || !CONSTANT_P (op1))
698 return false;
700 if (GET_MODE (rhs) != outer_mode)
702 /* ppc64 uses expressions like
704 (set x:SI (plus:SI (subreg:SI y:DI) 1)).
706 this is equivalent to
708 (set x':DI (plus:DI y:DI 1))
709 (set x:SI (subreg:SI (x':DI))). */
710 if (GET_CODE (op0) != SUBREG)
711 return false;
712 if (GET_MODE (SUBREG_REG (op0)) != outer_mode)
713 return false;
716 next = op0;
717 break;
719 case SIGN_EXTEND:
720 case ZERO_EXTEND:
721 if (GET_MODE (rhs) != outer_mode)
722 return false;
724 op0 = XEXP (rhs, 0);
725 if (!simple_reg_p (op0))
726 return false;
728 next = op0;
729 break;
731 default:
732 return false;
735 if (GET_CODE (next) == SUBREG)
737 if (!subreg_lowpart_p (next))
738 return false;
740 nextr = SUBREG_REG (next);
741 if (GET_MODE (nextr) != outer_mode)
742 return false;
744 else
745 nextr = next;
747 res = iv_get_reaching_def (insn, nextr, &next_def);
749 if (res == GRD_INVALID || res == GRD_INVARIANT)
750 return false;
752 if (res == GRD_MAYBE_BIV)
754 if (!rtx_equal_p (nextr, reg))
755 return false;
757 *inner_step = const0_rtx;
758 *extend = IV_UNKNOWN_EXTEND;
759 *inner_mode = outer_mode;
760 *outer_step = const0_rtx;
762 else if (!get_biv_step_1 (next_def, reg,
763 inner_step, inner_mode, extend, outer_mode,
764 outer_step))
765 return false;
767 if (GET_CODE (next) == SUBREG)
769 machine_mode amode = GET_MODE (next);
771 if (GET_MODE_SIZE (amode) > GET_MODE_SIZE (*inner_mode))
772 return false;
774 *inner_mode = amode;
775 *inner_step = simplify_gen_binary (PLUS, outer_mode,
776 *inner_step, *outer_step);
777 *outer_step = const0_rtx;
778 *extend = IV_UNKNOWN_EXTEND;
781 switch (code)
783 case REG:
784 case SUBREG:
785 break;
787 case PLUS:
788 case MINUS:
789 if (*inner_mode == outer_mode
790 /* See comment in previous switch. */
791 || GET_MODE (rhs) != outer_mode)
792 *inner_step = simplify_gen_binary (code, outer_mode,
793 *inner_step, op1);
794 else
795 *outer_step = simplify_gen_binary (code, outer_mode,
796 *outer_step, op1);
797 break;
799 case SIGN_EXTEND:
800 case ZERO_EXTEND:
801 gcc_assert (GET_MODE (op0) == *inner_mode
802 && *extend == IV_UNKNOWN_EXTEND
803 && *outer_step == const0_rtx);
805 *extend = (code == SIGN_EXTEND) ? IV_SIGN_EXTEND : IV_ZERO_EXTEND;
806 break;
808 default:
809 return false;
812 return true;
815 /* Gets the operation on register REG inside loop, in shape
817 OUTER_STEP + EXTEND_{OUTER_MODE} (SUBREG_{INNER_MODE} (REG + INNER_STEP))
819 If the operation cannot be described in this shape, return false.
820 LAST_DEF is the definition of REG that dominates loop latch. */
822 static bool
823 get_biv_step (df_ref last_def, rtx reg, rtx *inner_step,
824 machine_mode *inner_mode, enum iv_extend_code *extend,
825 machine_mode *outer_mode, rtx *outer_step)
827 *outer_mode = GET_MODE (reg);
829 if (!get_biv_step_1 (last_def, reg,
830 inner_step, inner_mode, extend, *outer_mode,
831 outer_step))
832 return false;
834 gcc_assert ((*inner_mode == *outer_mode) != (*extend != IV_UNKNOWN_EXTEND));
835 gcc_assert (*inner_mode != *outer_mode || *outer_step == const0_rtx);
837 return true;
840 /* Records information that DEF is induction variable IV. */
842 static void
843 record_iv (df_ref def, struct rtx_iv *iv)
845 struct rtx_iv *recorded_iv = XNEW (struct rtx_iv);
847 *recorded_iv = *iv;
848 check_iv_ref_table_size ();
849 DF_REF_IV_SET (def, recorded_iv);
852 /* If DEF was already analyzed for bivness, store the description of the biv to
853 IV and return true. Otherwise return false. */
855 static bool
856 analyzed_for_bivness_p (rtx def, struct rtx_iv *iv)
858 struct biv_entry *biv = bivs->find_with_hash (def, REGNO (def));
860 if (!biv)
861 return false;
863 *iv = biv->iv;
864 return true;
867 static void
868 record_biv (rtx def, struct rtx_iv *iv)
870 struct biv_entry *biv = XNEW (struct biv_entry);
871 biv_entry **slot = bivs->find_slot_with_hash (def, REGNO (def), INSERT);
873 biv->regno = REGNO (def);
874 biv->iv = *iv;
875 gcc_assert (!*slot);
876 *slot = biv;
879 /* Determines whether DEF is a biv and if so, stores its description
880 to *IV. */
882 static bool
883 iv_analyze_biv (rtx def, struct rtx_iv *iv)
885 rtx inner_step, outer_step;
886 machine_mode inner_mode, outer_mode;
887 enum iv_extend_code extend;
888 df_ref last_def;
890 if (dump_file)
892 fprintf (dump_file, "Analyzing ");
893 print_rtl (dump_file, def);
894 fprintf (dump_file, " for bivness.\n");
897 if (!REG_P (def))
899 if (!CONSTANT_P (def))
900 return false;
902 return iv_constant (iv, def, VOIDmode);
905 if (!latch_dominating_def (def, &last_def))
907 if (dump_file)
908 fprintf (dump_file, " not simple.\n");
909 return false;
912 if (!last_def)
913 return iv_constant (iv, def, VOIDmode);
915 if (analyzed_for_bivness_p (def, iv))
917 if (dump_file)
918 fprintf (dump_file, " already analysed.\n");
919 return iv->base != NULL_RTX;
922 if (!get_biv_step (last_def, def, &inner_step, &inner_mode, &extend,
923 &outer_mode, &outer_step))
925 iv->base = NULL_RTX;
926 goto end;
929 /* The loop transforms base to es (base + inner_step) + outer_step,
930 where es means the extension of the subreg from inner_mode to outer_mode.
931 The corresponding induction variable is
933 es ((base - outer_step) + i * (inner_step + outer_step)) + outer_step */
935 iv->base = simplify_gen_binary (MINUS, outer_mode, def, outer_step);
936 iv->step = simplify_gen_binary (PLUS, outer_mode, inner_step, outer_step);
937 iv->mode = inner_mode;
938 iv->extend_mode = outer_mode;
939 iv->extend = extend;
940 iv->mult = const1_rtx;
941 iv->delta = outer_step;
942 iv->first_special = inner_mode != outer_mode;
944 end:
945 if (dump_file)
947 fprintf (dump_file, " ");
948 dump_iv_info (dump_file, iv);
949 fprintf (dump_file, "\n");
952 record_biv (def, iv);
953 return iv->base != NULL_RTX;
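
/* Editor's illustration (not from the original file): for the common case of
   a plain SImode counter whose only update in the loop is
   (set (reg i) (plus:SI (reg i) (const_int 1))), get_biv_step yields
   inner_step = 1, outer_step = 0, inner_mode = outer_mode = SImode and
   extend = IV_UNKNOWN_EXTEND, so the description recorded above simplifies
   to base = (reg i), step = (const_int 1), delta = 0, mult = 1 and
   first_special = false.  */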
956 /* Analyzes expression RHS used at INSN and stores the result to *IV.
957 The mode of the induction variable is MODE. */
959 bool
960 iv_analyze_expr (rtx_insn *insn, rtx rhs, machine_mode mode,
961 struct rtx_iv *iv)
963 rtx mby = NULL_RTX, tmp;
964 rtx op0 = NULL_RTX, op1 = NULL_RTX;
965 struct rtx_iv iv0, iv1;
966 enum rtx_code code = GET_CODE (rhs);
967 machine_mode omode = mode;
969 iv->mode = VOIDmode;
970 iv->base = NULL_RTX;
971 iv->step = NULL_RTX;
973 gcc_assert (GET_MODE (rhs) == mode || GET_MODE (rhs) == VOIDmode);
975 if (CONSTANT_P (rhs)
976 || REG_P (rhs)
977 || code == SUBREG)
979 if (!iv_analyze_op (insn, rhs, iv))
980 return false;
982 if (iv->mode == VOIDmode)
984 iv->mode = mode;
985 iv->extend_mode = mode;
988 return true;
991 switch (code)
993 case REG:
994 op0 = rhs;
995 break;
997 case SIGN_EXTEND:
998 case ZERO_EXTEND:
999 case NEG:
1000 op0 = XEXP (rhs, 0);
1001 omode = GET_MODE (op0);
1002 break;
1004 case PLUS:
1005 case MINUS:
1006 op0 = XEXP (rhs, 0);
1007 op1 = XEXP (rhs, 1);
1008 break;
1010 case MULT:
1011 op0 = XEXP (rhs, 0);
1012 mby = XEXP (rhs, 1);
1013 if (!CONSTANT_P (mby))
1015 tmp = op0;
1016 op0 = mby;
1017 mby = tmp;
1019 if (!CONSTANT_P (mby))
1020 return false;
1021 break;
1023 case ASHIFT:
1024 op0 = XEXP (rhs, 0);
1025 mby = XEXP (rhs, 1);
1026 if (!CONSTANT_P (mby))
1027 return false;
1028 break;
1030 default:
1031 return false;
1034 if (op0
1035 && !iv_analyze_expr (insn, op0, omode, &iv0))
1036 return false;
1038 if (op1
1039 && !iv_analyze_expr (insn, op1, omode, &iv1))
1040 return false;
1042 switch (code)
1044 case SIGN_EXTEND:
1045 if (!iv_extend (&iv0, IV_SIGN_EXTEND, mode))
1046 return false;
1047 break;
1049 case ZERO_EXTEND:
1050 if (!iv_extend (&iv0, IV_ZERO_EXTEND, mode))
1051 return false;
1052 break;
1054 case NEG:
1055 if (!iv_neg (&iv0))
1056 return false;
1057 break;
1059 case PLUS:
1060 case MINUS:
1061 if (!iv_add (&iv0, &iv1, code))
1062 return false;
1063 break;
1065 case MULT:
1066 if (!iv_mult (&iv0, mby))
1067 return false;
1068 break;
1070 case ASHIFT:
1071 if (!iv_shift (&iv0, mby))
1072 return false;
1073 break;
1075 default:
1076 break;
1079 *iv = iv0;
1080 return iv->base != NULL_RTX;
1083 /* Analyzes iv DEF and stores the result to *IV. */
1085 static bool
1086 iv_analyze_def (df_ref def, struct rtx_iv *iv)
1088 rtx_insn *insn = DF_REF_INSN (def);
1089 rtx reg = DF_REF_REG (def);
1090 rtx set, rhs;
1092 if (dump_file)
1094 fprintf (dump_file, "Analyzing def of ");
1095 print_rtl (dump_file, reg);
1096 fprintf (dump_file, " in insn ");
1097 print_rtl_single (dump_file, insn);
1100 check_iv_ref_table_size ();
1101 if (DF_REF_IV (def))
1103 if (dump_file)
1104 fprintf (dump_file, " already analysed.\n");
1105 *iv = *DF_REF_IV (def);
1106 return iv->base != NULL_RTX;
1109 iv->mode = VOIDmode;
1110 iv->base = NULL_RTX;
1111 iv->step = NULL_RTX;
1113 if (!REG_P (reg))
1114 return false;
1116 set = single_set (insn);
1117 if (!set)
1118 return false;
1120 if (!REG_P (SET_DEST (set)))
1121 return false;
1123 gcc_assert (SET_DEST (set) == reg);
1124 rhs = find_reg_equal_equiv_note (insn);
1125 if (rhs)
1126 rhs = XEXP (rhs, 0);
1127 else
1128 rhs = SET_SRC (set);
1130 iv_analyze_expr (insn, rhs, GET_MODE (reg), iv);
1131 record_iv (def, iv);
1133 if (dump_file)
1135 print_rtl (dump_file, reg);
1136 fprintf (dump_file, " in insn ");
1137 print_rtl_single (dump_file, insn);
1138 fprintf (dump_file, " is ");
1139 dump_iv_info (dump_file, iv);
1140 fprintf (dump_file, "\n");
1143 return iv->base != NULL_RTX;
1146 /* Analyzes operand OP of INSN and stores the result to *IV. */
1148 static bool
1149 iv_analyze_op (rtx_insn *insn, rtx op, struct rtx_iv *iv)
1151 df_ref def = NULL;
1152 enum iv_grd_result res;
1154 if (dump_file)
1156 fprintf (dump_file, "Analyzing operand ");
1157 print_rtl (dump_file, op);
1158 fprintf (dump_file, " of insn ");
1159 print_rtl_single (dump_file, insn);
1162 if (function_invariant_p (op))
1163 res = GRD_INVARIANT;
1164 else if (GET_CODE (op) == SUBREG)
1166 if (!subreg_lowpart_p (op))
1167 return false;
1169 if (!iv_analyze_op (insn, SUBREG_REG (op), iv))
1170 return false;
1172 return iv_subreg (iv, GET_MODE (op));
1174 else
1176 res = iv_get_reaching_def (insn, op, &def);
1177 if (res == GRD_INVALID)
1179 if (dump_file)
1180 fprintf (dump_file, " not simple.\n");
1181 return false;
1185 if (res == GRD_INVARIANT)
1187 iv_constant (iv, op, VOIDmode);
1189 if (dump_file)
1191 fprintf (dump_file, " ");
1192 dump_iv_info (dump_file, iv);
1193 fprintf (dump_file, "\n");
1195 return true;
1198 if (res == GRD_MAYBE_BIV)
1199 return iv_analyze_biv (op, iv);
1201 return iv_analyze_def (def, iv);
1204 /* Analyzes value VAL at INSN and stores the result to *IV. */
1206 bool
1207 iv_analyze (rtx_insn *insn, rtx val, struct rtx_iv *iv)
1209 rtx reg;
1211 /* We must find the insn in which VAL is used, so that we get to UD chains.
1212 Since the function is sometimes called on result of get_condition,
1213 this does not necessarily have to be directly INSN; scan also the
1214 following insns. */
1215 if (simple_reg_p (val))
1217 if (GET_CODE (val) == SUBREG)
1218 reg = SUBREG_REG (val);
1219 else
1220 reg = val;
1222 while (!df_find_use (insn, reg))
1223 insn = NEXT_INSN (insn);
1226 return iv_analyze_op (insn, val, iv);
1229 /* Analyzes definition of DEF in INSN and stores the result to IV. */
1231 bool
1232 iv_analyze_result (rtx_insn *insn, rtx def, struct rtx_iv *iv)
1234 df_ref adef;
1236 adef = df_find_def (insn, def);
1237 if (!adef)
1238 return false;
1240 return iv_analyze_def (adef, iv);
1243 /* Checks whether definition of register REG in INSN is a basic induction
1244 variable. IV analysis must have been initialized (via a call to
1245 iv_analysis_loop_init) for this function to produce a result. */
1247 bool
1248 biv_p (rtx_insn *insn, rtx reg)
1250 struct rtx_iv iv;
1251 df_ref def, last_def;
1253 if (!simple_reg_p (reg))
1254 return false;
1256 def = df_find_def (insn, reg);
1257 gcc_assert (def != NULL);
1258 if (!latch_dominating_def (reg, &last_def))
1259 return false;
1260 if (last_def != def)
1261 return false;
1263 if (!iv_analyze_biv (reg, &iv))
1264 return false;
1266 return iv.step != const0_rtx;
1269 /* Calculates value of IV at ITERATION-th iteration. */
1272 get_iv_value (struct rtx_iv *iv, rtx iteration)
1274 rtx val;
1276 /* We would need to generate some if_then_else patterns, and so far
1277 it is not needed anywhere. */
1278 gcc_assert (!iv->first_special);
1280 if (iv->step != const0_rtx && iteration != const0_rtx)
1281 val = simplify_gen_binary (PLUS, iv->extend_mode, iv->base,
1282 simplify_gen_binary (MULT, iv->extend_mode,
1283 iv->step, iteration));
1284 else
1285 val = iv->base;
1287 if (iv->extend_mode == iv->mode)
1288 return val;
1290 val = lowpart_subreg (iv->mode, val, iv->extend_mode);
1292 if (iv->extend == IV_UNKNOWN_EXTEND)
1293 return val;
1295 val = simplify_gen_unary (iv_extend_to_rtx_code (iv->extend),
1296 iv->extend_mode, val, iv->mode);
1297 val = simplify_gen_binary (PLUS, iv->extend_mode, iv->delta,
1298 simplify_gen_binary (MULT, iv->extend_mode,
1299 iv->mult, val));
1301 return val;
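
/* Editor's note (not part of the original file): in other words, when mode
   and extend_mode agree the value computed above is simply
   base + ITERATION * step; otherwise the low part of that value is taken in
   mode and, if the extend code is known, the result is

     delta + mult * EXTEND (lowpart_{mode} (base + ITERATION * step))

   computed in extend_mode.  */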
1304 /* Free the data for an induction variable analysis. */
1306 void
1307 iv_analysis_done (void)
1309 if (!clean_slate)
1311 clear_iv_info ();
1312 clean_slate = true;
1313 df_finish_pass (true);
1314 delete bivs;
1315 bivs = NULL;
1316 free (iv_ref_table);
1317 iv_ref_table = NULL;
1318 iv_ref_table_size = 0;
1322 /* Computes the inverse of X modulo (1 << MOD). */
1324 static uint64_t
1325 inverse (uint64_t x, int mod)
1327 uint64_t mask =
1328 ((uint64_t) 1 << (mod - 1) << 1) - 1;
1329 uint64_t rslt = 1;
1330 int i;
1332 for (i = 0; i < mod - 1; i++)
1334 rslt = (rslt * x) & mask;
1335 x = (x * x) & mask;
1338 return rslt;
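
/* Editor's worked example (not from the original file): for MOD == 4 the
   mask is 15 and inverse (3, 4) returns 11, and indeed 3 * 11 == 33 == 1
   (mod 16).  The loop raises X to the power 2^(MOD-1) - 1, which for odd X
   is its multiplicative inverse modulo 1 << MOD.  */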
1341 /* Checks whether any register in X is in set ALT. */
1343 static bool
1344 altered_reg_used (const_rtx x, bitmap alt)
1346 subrtx_iterator::array_type array;
1347 FOR_EACH_SUBRTX (iter, array, x, NONCONST)
1349 const_rtx x = *iter;
1350 if (REG_P (x) && REGNO_REG_SET_P (alt, REGNO (x)))
1351 return true;
1353 return false;
1356 /* Marks registers altered by EXPR in set ALT. */
1358 static void
1359 mark_altered (rtx expr, const_rtx by ATTRIBUTE_UNUSED, void *alt)
1361 if (GET_CODE (expr) == SUBREG)
1362 expr = SUBREG_REG (expr);
1363 if (!REG_P (expr))
1364 return;
1366 SET_REGNO_REG_SET ((bitmap) alt, REGNO (expr));
1369 /* Checks whether RHS is simple enough to process. */
1371 static bool
1372 simple_rhs_p (rtx rhs)
1374 rtx op0, op1;
1376 if (function_invariant_p (rhs)
1377 || (REG_P (rhs) && !HARD_REGISTER_P (rhs)))
1378 return true;
1380 switch (GET_CODE (rhs))
1382 case PLUS:
1383 case MINUS:
1384 case AND:
1385 op0 = XEXP (rhs, 0);
1386 op1 = XEXP (rhs, 1);
1387 /* Allow reg OP const and reg OP reg. */
1388 if (!(REG_P (op0) && !HARD_REGISTER_P (op0))
1389 && !function_invariant_p (op0))
1390 return false;
1391 if (!(REG_P (op1) && !HARD_REGISTER_P (op1))
1392 && !function_invariant_p (op1))
1393 return false;
1395 return true;
1397 case ASHIFT:
1398 case ASHIFTRT:
1399 case LSHIFTRT:
1400 case MULT:
1401 op0 = XEXP (rhs, 0);
1402 op1 = XEXP (rhs, 1);
1403 /* Allow reg OP const. */
1404 if (!(REG_P (op0) && !HARD_REGISTER_P (op0)))
1405 return false;
1406 if (!function_invariant_p (op1))
1407 return false;
1409 return true;
1411 default:
1412 return false;
1416 /* If REGNO has a single definition, return its known value, otherwise return
1417 null. */
1419 static rtx
1420 find_single_def_src (unsigned int regno)
1422 df_ref adef;
1423 rtx set, src;
1425 for (;;)
1427 rtx note;
1428 adef = DF_REG_DEF_CHAIN (regno);
1429 if (adef == NULL || DF_REF_NEXT_REG (adef) != NULL
1430 || DF_REF_IS_ARTIFICIAL (adef))
1431 return NULL_RTX;
1433 set = single_set (DF_REF_INSN (adef));
1434 if (set == NULL || !REG_P (SET_DEST (set))
1435 || REGNO (SET_DEST (set)) != regno)
1436 return NULL_RTX;
1438 note = find_reg_equal_equiv_note (DF_REF_INSN (adef));
1440 if (note && function_invariant_p (XEXP (note, 0)))
1442 src = XEXP (note, 0);
1443 break;
1445 src = SET_SRC (set);
1447 if (REG_P (src))
1449 regno = REGNO (src);
1450 continue;
1452 break;
1454 if (!function_invariant_p (src))
1455 return NULL_RTX;
1457 return src;
1460 /* If any registers in *EXPR have a single definition, try to replace
1461 them with the known-equivalent values. */
1463 static void
1464 replace_single_def_regs (rtx *expr)
1466 subrtx_var_iterator::array_type array;
1467 repeat:
1468 FOR_EACH_SUBRTX_VAR (iter, array, *expr, NONCONST)
1470 rtx x = *iter;
1471 if (REG_P (x))
1472 if (rtx new_x = find_single_def_src (REGNO (x)))
1474 *expr = simplify_replace_rtx (*expr, x, new_x);
1475 goto repeat;
1480 /* A subroutine of simplify_using_initial_values, this function examines INSN
1481 to see if it contains a suitable set that we can use to make a replacement.
1482 If it is suitable, return true and set DEST and SRC to the lhs and rhs of
1483 the set; return false otherwise. */
1485 static bool
1486 suitable_set_for_replacement (rtx_insn *insn, rtx *dest, rtx *src)
1488 rtx set = single_set (insn);
1489 rtx lhs = NULL_RTX, rhs;
1491 if (!set)
1492 return false;
1494 lhs = SET_DEST (set);
1495 if (!REG_P (lhs))
1496 return false;
1498 rhs = find_reg_equal_equiv_note (insn);
1499 if (rhs)
1500 rhs = XEXP (rhs, 0);
1501 else
1502 rhs = SET_SRC (set);
1504 if (!simple_rhs_p (rhs))
1505 return false;
1507 *dest = lhs;
1508 *src = rhs;
1509 return true;
1512 /* Using the data returned by suitable_set_for_replacement, replace DEST
1513 with SRC in *EXPR, storing the result back into *EXPR. Also call
1514 replace_single_def_regs if the replacement changed something. */
1515 static void
1516 replace_in_expr (rtx *expr, rtx dest, rtx src)
1518 rtx old = *expr;
1519 *expr = simplify_replace_rtx (*expr, dest, src);
1520 if (old == *expr)
1521 return;
1522 replace_single_def_regs (expr);
1525 /* Checks whether A implies B. */
1527 static bool
1528 implies_p (rtx a, rtx b)
1530 rtx op0, op1, opb0, opb1, r;
1531 machine_mode mode;
1533 if (rtx_equal_p (a, b))
1534 return true;
1536 if (GET_CODE (a) == EQ)
1538 op0 = XEXP (a, 0);
1539 op1 = XEXP (a, 1);
1541 if (REG_P (op0)
1542 || (GET_CODE (op0) == SUBREG
1543 && REG_P (SUBREG_REG (op0))))
1545 r = simplify_replace_rtx (b, op0, op1);
1546 if (r == const_true_rtx)
1547 return true;
1550 if (REG_P (op1)
1551 || (GET_CODE (op1) == SUBREG
1552 && REG_P (SUBREG_REG (op1))))
1554 r = simplify_replace_rtx (b, op1, op0);
1555 if (r == const_true_rtx)
1556 return true;
1560 if (b == const_true_rtx)
1561 return true;
1563 if ((GET_RTX_CLASS (GET_CODE (a)) != RTX_COMM_COMPARE
1564 && GET_RTX_CLASS (GET_CODE (a)) != RTX_COMPARE)
1565 || (GET_RTX_CLASS (GET_CODE (b)) != RTX_COMM_COMPARE
1566 && GET_RTX_CLASS (GET_CODE (b)) != RTX_COMPARE))
1567 return false;
1569 op0 = XEXP (a, 0);
1570 op1 = XEXP (a, 1);
1571 opb0 = XEXP (b, 0);
1572 opb1 = XEXP (b, 1);
1574 mode = GET_MODE (op0);
1575 if (mode != GET_MODE (opb0))
1576 mode = VOIDmode;
1577 else if (mode == VOIDmode)
1579 mode = GET_MODE (op1);
1580 if (mode != GET_MODE (opb1))
1581 mode = VOIDmode;
1584 /* A < B implies A + 1 <= B. */
1585 if ((GET_CODE (a) == GT || GET_CODE (a) == LT)
1586 && (GET_CODE (b) == GE || GET_CODE (b) == LE))
1589 if (GET_CODE (a) == GT)
1591 r = op0;
1592 op0 = op1;
1593 op1 = r;
1596 if (GET_CODE (b) == GE)
1598 r = opb0;
1599 opb0 = opb1;
1600 opb1 = r;
1603 if (SCALAR_INT_MODE_P (mode)
1604 && rtx_equal_p (op1, opb1)
1605 && simplify_gen_binary (MINUS, mode, opb0, op0) == const1_rtx)
1606 return true;
1607 return false;
1610 /* A < B or A > B imply A != B. TODO: Likewise
1611 A + n < B implies A != B + n if neither wraps. */
1612 if (GET_CODE (b) == NE
1613 && (GET_CODE (a) == GT || GET_CODE (a) == GTU
1614 || GET_CODE (a) == LT || GET_CODE (a) == LTU))
1616 if (rtx_equal_p (op0, opb0)
1617 && rtx_equal_p (op1, opb1))
1618 return true;
1621 /* For unsigned comparisons, A != 0 implies A > 0 and A >= 1. */
1622 if (GET_CODE (a) == NE
1623 && op1 == const0_rtx)
1625 if ((GET_CODE (b) == GTU
1626 && opb1 == const0_rtx)
1627 || (GET_CODE (b) == GEU
1628 && opb1 == const1_rtx))
1629 return rtx_equal_p (op0, opb0);
1632 /* A != N is equivalent to A - (N + 1) <u -1. */
1633 if (GET_CODE (a) == NE
1634 && CONST_INT_P (op1)
1635 && GET_CODE (b) == LTU
1636 && opb1 == constm1_rtx
1637 && GET_CODE (opb0) == PLUS
1638 && CONST_INT_P (XEXP (opb0, 1))
1639 /* Avoid overflows. */
1640 && ((unsigned HOST_WIDE_INT) INTVAL (XEXP (opb0, 1))
1641 != ((unsigned HOST_WIDE_INT)1
1642 << (HOST_BITS_PER_WIDE_INT - 1)) - 1)
1643 && INTVAL (XEXP (opb0, 1)) + 1 == -INTVAL (op1))
1644 return rtx_equal_p (op0, XEXP (opb0, 0));
1646 /* Likewise, A != N implies A - N > 0. */
1647 if (GET_CODE (a) == NE
1648 && CONST_INT_P (op1))
1650 if (GET_CODE (b) == GTU
1651 && GET_CODE (opb0) == PLUS
1652 && opb1 == const0_rtx
1653 && CONST_INT_P (XEXP (opb0, 1))
1654 /* Avoid overflows. */
1655 && ((unsigned HOST_WIDE_INT) INTVAL (XEXP (opb0, 1))
1656 != ((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1)))
1657 && rtx_equal_p (XEXP (opb0, 0), op0))
1658 return INTVAL (op1) == -INTVAL (XEXP (opb0, 1));
1659 if (GET_CODE (b) == GEU
1660 && GET_CODE (opb0) == PLUS
1661 && opb1 == const1_rtx
1662 && CONST_INT_P (XEXP (opb0, 1))
1663 /* Avoid overflows. */
1664 && ((unsigned HOST_WIDE_INT) INTVAL (XEXP (opb0, 1))
1665 != ((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1)))
1666 && rtx_equal_p (XEXP (opb0, 0), op0))
1667 return INTVAL (op1) == -INTVAL (XEXP (opb0, 1));
1670 /* A >s X, where X is positive, implies A <u Y, if Y is negative. */
1671 if ((GET_CODE (a) == GT || GET_CODE (a) == GE)
1672 && CONST_INT_P (op1)
1673 && ((GET_CODE (a) == GT && op1 == constm1_rtx)
1674 || INTVAL (op1) >= 0)
1675 && GET_CODE (b) == LTU
1676 && CONST_INT_P (opb1)
1677 && rtx_equal_p (op0, opb0))
1678 return INTVAL (opb1) < 0;
1680 return false;
1683 /* Canonicalizes COND so that
1685 (1) Operands are ordered according to
1686 swap_commutative_operands_p.
1687 (2) (LE x const) will be replaced with (LT x <const+1>) and similarly
1688 for GE, GEU, and LEU. */
1691 canon_condition (rtx cond)
1693 rtx tem;
1694 rtx op0, op1;
1695 enum rtx_code code;
1696 machine_mode mode;
1698 code = GET_CODE (cond);
1699 op0 = XEXP (cond, 0);
1700 op1 = XEXP (cond, 1);
1702 if (swap_commutative_operands_p (op0, op1))
1704 code = swap_condition (code);
1705 tem = op0;
1706 op0 = op1;
1707 op1 = tem;
1710 mode = GET_MODE (op0);
1711 if (mode == VOIDmode)
1712 mode = GET_MODE (op1);
1713 gcc_assert (mode != VOIDmode);
1715 if (CONST_INT_P (op1)
1716 && GET_MODE_CLASS (mode) != MODE_CC
1717 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1719 HOST_WIDE_INT const_val = INTVAL (op1);
1720 unsigned HOST_WIDE_INT uconst_val = const_val;
1721 unsigned HOST_WIDE_INT max_val
1722 = (unsigned HOST_WIDE_INT) GET_MODE_MASK (mode);
1724 switch (code)
1726 case LE:
1727 if ((unsigned HOST_WIDE_INT) const_val != max_val >> 1)
1728 code = LT, op1 = gen_int_mode (const_val + 1, GET_MODE (op0));
1729 break;
1731 /* When cross-compiling, const_val might be sign-extended from
1732 BITS_PER_WORD to HOST_BITS_PER_WIDE_INT */
1733 case GE:
1734 if ((HOST_WIDE_INT) (const_val & max_val)
1735 != (((HOST_WIDE_INT) 1
1736 << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1))))
1737 code = GT, op1 = gen_int_mode (const_val - 1, mode);
1738 break;
1740 case LEU:
1741 if (uconst_val < max_val)
1742 code = LTU, op1 = gen_int_mode (uconst_val + 1, mode);
1743 break;
1745 case GEU:
1746 if (uconst_val != 0)
1747 code = GTU, op1 = gen_int_mode (uconst_val - 1, mode);
1748 break;
1750 default:
1751 break;
1755 if (op0 != XEXP (cond, 0)
1756 || op1 != XEXP (cond, 1)
1757 || code != GET_CODE (cond)
1758 || GET_MODE (cond) != SImode)
1759 cond = gen_rtx_fmt_ee (code, SImode, op0, op1);
1761 return cond;
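
/* Editor's illustration (not from the original file): canonicalization turns
   (le (reg x) (const_int 4)) into (lt (reg x) (const_int 5)) and
   (geu (reg x) (const_int 1)) into (gtu (reg x) (const_int 0)), after first
   swapping the operands whenever swap_commutative_operands_p asks for it.  */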
1764 /* Reverses CONDition; returns NULL if we cannot. */
1766 static rtx
1767 reversed_condition (rtx cond)
1769 enum rtx_code reversed;
1770 reversed = reversed_comparison_code (cond, NULL);
1771 if (reversed == UNKNOWN)
1772 return NULL_RTX;
1773 else
1774 return gen_rtx_fmt_ee (reversed,
1775 GET_MODE (cond), XEXP (cond, 0),
1776 XEXP (cond, 1));
1779 /* Tries to use the fact that COND holds to simplify EXPR. ALTERED is the
1780 set of altered regs. */
1782 void
1783 simplify_using_condition (rtx cond, rtx *expr, regset altered)
1785 rtx rev, reve, exp = *expr;
1787 /* If some register gets altered later, we do not really speak about its
1788 value at the time of comparison. */
1789 if (altered && altered_reg_used (cond, altered))
1790 return;
1792 if (GET_CODE (cond) == EQ
1793 && REG_P (XEXP (cond, 0)) && CONSTANT_P (XEXP (cond, 1)))
1795 *expr = simplify_replace_rtx (*expr, XEXP (cond, 0), XEXP (cond, 1));
1796 return;
1799 if (!COMPARISON_P (exp))
1800 return;
1802 rev = reversed_condition (cond);
1803 reve = reversed_condition (exp);
1805 cond = canon_condition (cond);
1806 exp = canon_condition (exp);
1807 if (rev)
1808 rev = canon_condition (rev);
1809 if (reve)
1810 reve = canon_condition (reve);
1812 if (rtx_equal_p (exp, cond))
1814 *expr = const_true_rtx;
1815 return;
1818 if (rev && rtx_equal_p (exp, rev))
1820 *expr = const0_rtx;
1821 return;
1824 if (implies_p (cond, exp))
1826 *expr = const_true_rtx;
1827 return;
1830 if (reve && implies_p (cond, reve))
1832 *expr = const0_rtx;
1833 return;
1836 /* A proof by contradiction. If *EXPR implies (not cond), *EXPR must
1837 be false. */
1838 if (rev && implies_p (exp, rev))
1840 *expr = const0_rtx;
1841 return;
1844 /* Similarly, if (not *EXPR) implies (not cond), *EXPR must be true. */
1845 if (rev && reve && implies_p (reve, rev))
1847 *expr = const_true_rtx;
1848 return;
1851 /* We would like to have some other tests here. TODO. */
1853 return;
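
/* Editor's sketch of the effect (not from the original file): if COND is
   (ne (reg n) (const_int 0)) and *EXPR is (geu (reg n) (const_int 1)),
   the unsigned implication noted in implies_p applies and *EXPR becomes
   const_true_rtx; if COND is (eq (reg n) (const_int 0)), the constant is
   simply substituted for the register in *EXPR.  */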
1856 /* Use the relationship between A and *B to eventually eliminate *B.
1857 OP is the operation we consider. */
1859 static void
1860 eliminate_implied_condition (enum rtx_code op, rtx a, rtx *b)
1862 switch (op)
1864 case AND:
1865 /* If A implies *B, we may replace *B by true. */
1866 if (implies_p (a, *b))
1867 *b = const_true_rtx;
1868 break;
1870 case IOR:
1871 /* If *B implies A, we may replace *B by false. */
1872 if (implies_p (*b, a))
1873 *b = const0_rtx;
1874 break;
1876 default:
1877 gcc_unreachable ();
1881 /* Eliminates the conditions in TAIL that are implied by HEAD. OP is the
1882 operation we consider. */
1884 static void
1885 eliminate_implied_conditions (enum rtx_code op, rtx *head, rtx tail)
1887 rtx elt;
1889 for (elt = tail; elt; elt = XEXP (elt, 1))
1890 eliminate_implied_condition (op, *head, &XEXP (elt, 0));
1891 for (elt = tail; elt; elt = XEXP (elt, 1))
1892 eliminate_implied_condition (op, XEXP (elt, 0), head);
1895 /* Simplifies *EXPR using initial values at the start of the LOOP. If *EXPR
1896 is a list, its elements are assumed to be combined using OP. */
1898 static void
1899 simplify_using_initial_values (struct loop *loop, enum rtx_code op, rtx *expr)
1901 bool expression_valid;
1902 rtx head, tail, last_valid_expr;
1903 rtx_expr_list *cond_list;
1904 rtx_insn *insn;
1905 rtx neutral, aggr;
1906 regset altered, this_altered;
1907 edge e;
1909 if (!*expr)
1910 return;
1912 if (CONSTANT_P (*expr))
1913 return;
1915 if (GET_CODE (*expr) == EXPR_LIST)
1917 head = XEXP (*expr, 0);
1918 tail = XEXP (*expr, 1);
1920 eliminate_implied_conditions (op, &head, tail);
1922 switch (op)
1924 case AND:
1925 neutral = const_true_rtx;
1926 aggr = const0_rtx;
1927 break;
1929 case IOR:
1930 neutral = const0_rtx;
1931 aggr = const_true_rtx;
1932 break;
1934 default:
1935 gcc_unreachable ();
1938 simplify_using_initial_values (loop, UNKNOWN, &head);
1939 if (head == aggr)
1941 XEXP (*expr, 0) = aggr;
1942 XEXP (*expr, 1) = NULL_RTX;
1943 return;
1945 else if (head == neutral)
1947 *expr = tail;
1948 simplify_using_initial_values (loop, op, expr);
1949 return;
1951 simplify_using_initial_values (loop, op, &tail);
1953 if (tail && XEXP (tail, 0) == aggr)
1955 *expr = tail;
1956 return;
1959 XEXP (*expr, 0) = head;
1960 XEXP (*expr, 1) = tail;
1961 return;
1964 gcc_assert (op == UNKNOWN);
1966 replace_single_def_regs (expr);
1967 if (CONSTANT_P (*expr))
1968 return;
1970 e = loop_preheader_edge (loop);
1971 if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
1972 return;
1974 altered = ALLOC_REG_SET (&reg_obstack);
1975 this_altered = ALLOC_REG_SET (&reg_obstack);
1977 expression_valid = true;
1978 last_valid_expr = *expr;
1979 cond_list = NULL;
1980 while (1)
1982 insn = BB_END (e->src);
1983 if (any_condjump_p (insn))
1985 rtx cond = get_condition (BB_END (e->src), NULL, false, true);
1987 if (cond && (e->flags & EDGE_FALLTHRU))
1988 cond = reversed_condition (cond);
1989 if (cond)
1991 rtx old = *expr;
1992 simplify_using_condition (cond, expr, altered);
1993 if (old != *expr)
1995 rtx note;
1996 if (CONSTANT_P (*expr))
1997 goto out;
1998 for (note = cond_list; note; note = XEXP (note, 1))
2000 simplify_using_condition (XEXP (note, 0), expr, altered);
2001 if (CONSTANT_P (*expr))
2002 goto out;
2005 cond_list = alloc_EXPR_LIST (0, cond, cond_list);
2009 FOR_BB_INSNS_REVERSE (e->src, insn)
2011 rtx src, dest;
2012 rtx old = *expr;
2014 if (!INSN_P (insn))
2015 continue;
2017 CLEAR_REG_SET (this_altered);
2018 note_stores (PATTERN (insn), mark_altered, this_altered);
2019 if (CALL_P (insn))
2021 /* Kill all call clobbered registers. */
2022 unsigned int i;
2023 hard_reg_set_iterator hrsi;
2024 EXECUTE_IF_SET_IN_HARD_REG_SET (regs_invalidated_by_call,
2025 0, i, hrsi)
2026 SET_REGNO_REG_SET (this_altered, i);
2029 if (suitable_set_for_replacement (insn, &dest, &src))
2031 rtx_expr_list **pnote, **pnote_next;
2033 replace_in_expr (expr, dest, src);
2034 if (CONSTANT_P (*expr))
2035 goto out;
2037 for (pnote = &cond_list; *pnote; pnote = pnote_next)
2039 rtx note = *pnote;
2040 rtx old_cond = XEXP (note, 0);
2042 pnote_next = (rtx_expr_list **)&XEXP (note, 1);
2043 replace_in_expr (&XEXP (note, 0), dest, src);
2045 /* We can no longer use a condition that has been simplified
2046 to a constant, and simplify_using_condition will abort if
2047 we try. */
2048 if (CONSTANT_P (XEXP (note, 0)))
2050 *pnote = *pnote_next;
2051 pnote_next = pnote;
2052 free_EXPR_LIST_node (note);
2054 /* Retry simplifications with this condition if either the
2055 expression or the condition changed. */
2056 else if (old_cond != XEXP (note, 0) || old != *expr)
2057 simplify_using_condition (XEXP (note, 0), expr, altered);
2060 else
2062 rtx_expr_list **pnote, **pnote_next;
2064 /* If we did not use this insn to make a replacement, any overlap
2065 between stores in this insn and our expression will cause the
2066 expression to become invalid. */
2067 if (altered_reg_used (*expr, this_altered))
2068 goto out;
2070 /* Likewise for the conditions. */
2071 for (pnote = &cond_list; *pnote; pnote = pnote_next)
2073 rtx note = *pnote;
2074 rtx old_cond = XEXP (note, 0);
2076 pnote_next = (rtx_expr_list **)&XEXP (note, 1);
2077 if (altered_reg_used (old_cond, this_altered))
2079 *pnote = *pnote_next;
2080 pnote_next = pnote;
2081 free_EXPR_LIST_node (note);
2086 if (CONSTANT_P (*expr))
2087 goto out;
2089 IOR_REG_SET (altered, this_altered);
2091 /* If the expression now contains regs that have been altered, we
2092 can't return it to the caller. However, it is still valid for
2093 further simplification, so keep searching to see if we can
2094 eventually turn it into a constant. */
2095 if (altered_reg_used (*expr, altered))
2096 expression_valid = false;
2097 if (expression_valid)
2098 last_valid_expr = *expr;
2101 if (!single_pred_p (e->src)
2102 || single_pred (e->src) == ENTRY_BLOCK_PTR_FOR_FN (cfun))
2103 break;
2104 e = single_pred_edge (e->src);
2107 out:
2108 free_EXPR_LIST_list (&cond_list);
2109 if (!CONSTANT_P (*expr))
2110 *expr = last_valid_expr;
2111 FREE_REG_SET (altered);
2112 FREE_REG_SET (this_altered);
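
/* Editor's sketch (not from the original file): if the single-predecessor
   chain leading into the loop preheader ends in a conditional jump whose
   condition on the edge taken towards the loop is, say,
   (gt (reg n) (const_int 0)), and no insn on that path alters (reg n), then
   an *EXPR equal to or implied by that condition is simplified to
   const_true_rtx above.  */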
2115 /* Transforms invariant IV into MODE. Adds to DESC assumptions based on the
2116 fact that IV occurs as the left operand of comparison COND and that its
2117 signedness is SIGNED_P. */
2119 static void
2120 shorten_into_mode (struct rtx_iv *iv, machine_mode mode,
2121 enum rtx_code cond, bool signed_p, struct niter_desc *desc)
2123 rtx mmin, mmax, cond_over, cond_under;
2125 get_mode_bounds (mode, signed_p, iv->extend_mode, &mmin, &mmax);
2126 cond_under = simplify_gen_relational (LT, SImode, iv->extend_mode,
2127 iv->base, mmin);
2128 cond_over = simplify_gen_relational (GT, SImode, iv->extend_mode,
2129 iv->base, mmax);
2131 switch (cond)
2133 case LE:
2134 case LT:
2135 case LEU:
2136 case LTU:
2137 if (cond_under != const0_rtx)
2138 desc->infinite =
2139 alloc_EXPR_LIST (0, cond_under, desc->infinite);
2140 if (cond_over != const0_rtx)
2141 desc->noloop_assumptions =
2142 alloc_EXPR_LIST (0, cond_over, desc->noloop_assumptions);
2143 break;
2145 case GE:
2146 case GT:
2147 case GEU:
2148 case GTU:
2149 if (cond_over != const0_rtx)
2150 desc->infinite =
2151 alloc_EXPR_LIST (0, cond_over, desc->infinite);
2152 if (cond_under != const0_rtx)
2153 desc->noloop_assumptions =
2154 alloc_EXPR_LIST (0, cond_under, desc->noloop_assumptions);
2155 break;
2157 case NE:
2158 if (cond_over != const0_rtx)
2159 desc->infinite =
2160 alloc_EXPR_LIST (0, cond_over, desc->infinite);
2161 if (cond_under != const0_rtx)
2162 desc->infinite =
2163 alloc_EXPR_LIST (0, cond_under, desc->infinite);
2164 break;
2166 default:
2167 gcc_unreachable ();
2170 iv->mode = mode;
2171 iv->extend = signed_p ? IV_SIGN_EXTEND : IV_ZERO_EXTEND;
2174 /* Transforms IV0 and IV1 compared by COND so that they are both compared as
2175 subregs of the same mode if possible (sometimes it is necessary to add
2176 some assumptions to DESC). */
2178 static bool
2179 canonicalize_iv_subregs (struct rtx_iv *iv0, struct rtx_iv *iv1,
2180 enum rtx_code cond, struct niter_desc *desc)
2182 machine_mode comp_mode;
2183 bool signed_p;
2185 /* If the ivs behave specially in the first iteration, or are
2186 added/multiplied after extending, we ignore them. */
2187 if (iv0->first_special || iv0->mult != const1_rtx || iv0->delta != const0_rtx)
2188 return false;
2189 if (iv1->first_special || iv1->mult != const1_rtx || iv1->delta != const0_rtx)
2190 return false;
2192 /* If there is some extend, it must match signedness of the comparison. */
2193 switch (cond)
2195 case LE:
2196 case LT:
2197 if (iv0->extend == IV_ZERO_EXTEND
2198 || iv1->extend == IV_ZERO_EXTEND)
2199 return false;
2200 signed_p = true;
2201 break;
2203 case LEU:
2204 case LTU:
2205 if (iv0->extend == IV_SIGN_EXTEND
2206 || iv1->extend == IV_SIGN_EXTEND)
2207 return false;
2208 signed_p = false;
2209 break;
2211 case NE:
2212 if (iv0->extend != IV_UNKNOWN_EXTEND
2213 && iv1->extend != IV_UNKNOWN_EXTEND
2214 && iv0->extend != iv1->extend)
2215 return false;
2217 signed_p = false;
2218 if (iv0->extend != IV_UNKNOWN_EXTEND)
2219 signed_p = iv0->extend == IV_SIGN_EXTEND;
2220 if (iv1->extend != IV_UNKNOWN_EXTEND)
2221 signed_p = iv1->extend == IV_SIGN_EXTEND;
2222 break;
2224 default:
2225 gcc_unreachable ();
2228 /* Values of both variables should be computed in the same mode. These
2229 might indeed be different, if we have comparison like
2231 (compare (subreg:SI (iv0)) (subreg:SI (iv1)))
2233 and iv0 and iv1 are both ivs iterating in SI mode, but calculated
2234 in different modes. This does not seem impossible to handle, but
2235 it hardly ever occurs in practice.
2237 The only exception is the case when one of operands is invariant.
2238 For example pentium 3 generates comparisons like
2239 (lt (subreg:HI (reg:SI)) 100). Here we assign HImode to 100, but we
2240 definitely do not want this to prevent the optimization. */
2241 comp_mode = iv0->extend_mode;
2242 if (GET_MODE_BITSIZE (comp_mode) < GET_MODE_BITSIZE (iv1->extend_mode))
2243 comp_mode = iv1->extend_mode;
2245 if (iv0->extend_mode != comp_mode)
2247 if (iv0->mode != iv0->extend_mode
2248 || iv0->step != const0_rtx)
2249 return false;
2251 iv0->base = simplify_gen_unary (signed_p ? SIGN_EXTEND : ZERO_EXTEND,
2252 comp_mode, iv0->base, iv0->mode);
2253 iv0->extend_mode = comp_mode;
2256 if (iv1->extend_mode != comp_mode)
2258 if (iv1->mode != iv1->extend_mode
2259 || iv1->step != const0_rtx)
2260 return false;
2262 iv1->base = simplify_gen_unary (signed_p ? SIGN_EXTEND : ZERO_EXTEND,
2263 comp_mode, iv1->base, iv1->mode);
2264 iv1->extend_mode = comp_mode;
2267 /* Check that both ivs belong to a range of a single mode. If one of the
2268 operands is an invariant, we may need to shorten it into the common
2269 mode. */
2270 if (iv0->mode == iv0->extend_mode
2271 && iv0->step == const0_rtx
2272 && iv0->mode != iv1->mode)
2273 shorten_into_mode (iv0, iv1->mode, cond, signed_p, desc);
2275 if (iv1->mode == iv1->extend_mode
2276 && iv1->step == const0_rtx
2277 && iv0->mode != iv1->mode)
2278 shorten_into_mode (iv1, iv0->mode, swap_condition (cond), signed_p, desc);
2280 if (iv0->mode != iv1->mode)
2281 return false;
2283 desc->mode = iv0->mode;
2284 desc->signed_p = signed_p;
2286 return true;
2289 /* Tries to estimate the maximum number of iterations in LOOP, and returns the
2290 result. This function is called from iv_number_of_iterations with
2291 a number of fields in DESC already filled in. OLD_NITER is the original
2292 expression for the number of iterations, before we tried to simplify it. */
2294 static uint64_t
2295 determine_max_iter (struct loop *loop, struct niter_desc *desc, rtx old_niter)
2297 rtx niter = desc->niter_expr;
2298 rtx mmin, mmax, cmp;
2299 uint64_t nmax, inc;
2300 uint64_t andmax = 0;
2302 /* We used to look for constant operand 0 of AND,
2303 but canonicalization should always make this impossible. */
2304 gcc_checking_assert (GET_CODE (niter) != AND
2305 || !CONST_INT_P (XEXP (niter, 0)));
2307 if (GET_CODE (niter) == AND
2308 && CONST_INT_P (XEXP (niter, 1)))
2310 andmax = UINTVAL (XEXP (niter, 1));
2311 niter = XEXP (niter, 0);
2314 get_mode_bounds (desc->mode, desc->signed_p, desc->mode, &mmin, &mmax);
2315 nmax = UINTVAL (mmax) - UINTVAL (mmin);
2317 if (GET_CODE (niter) == UDIV)
2319 if (!CONST_INT_P (XEXP (niter, 1)))
2320 return nmax;
2321 inc = INTVAL (XEXP (niter, 1));
2322 niter = XEXP (niter, 0);
2324 else
2325 inc = 1;
2327 /* We could use a binary search here, but for now improving the upper
2328 bound by just one eliminates one important corner case. */
2329 cmp = simplify_gen_relational (desc->signed_p ? LT : LTU, VOIDmode,
2330 desc->mode, old_niter, mmax);
2331 simplify_using_initial_values (loop, UNKNOWN, &cmp);
2332 if (cmp == const_true_rtx)
2334 nmax--;
2336 if (dump_file)
2337 fprintf (dump_file, ";; improved upper bound by one.\n");
2339 nmax /= inc;
2340 if (andmax)
2341 nmax = MIN (nmax, andmax);
2342 if (dump_file)
2343 fprintf (dump_file, ";; Determined upper bound %"PRId64".\n",
2344 nmax);
2345 return nmax;
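
/* Editor's illustration (not from the original file): for an unsigned QImode
   counter the mode bounds give nmax = 255 - 0; if the niter expression has
   the form (udiv ... (const_int 4)) the bound drops to 255 / 4 = 63, and an
   outer (and ... (const_int 15)) would further clamp it to 15.  */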
2348 /* Computes number of iterations of the CONDITION in INSN in LOOP and stores
2349 the result into DESC. Very similar to determine_number_of_iterations
2350 (basically its rtl version), complicated by things like subregs. */
2352 static void
2353 iv_number_of_iterations (struct loop *loop, rtx_insn *insn, rtx condition,
2354 struct niter_desc *desc)
2356 rtx op0, op1, delta, step, bound, may_xform, tmp, tmp0, tmp1;
2357 struct rtx_iv iv0, iv1, tmp_iv;
2358 rtx assumption, may_not_xform;
2359 enum rtx_code cond;
2360 machine_mode mode, comp_mode;
2361 rtx mmin, mmax, mode_mmin, mode_mmax;
2362 uint64_t s, size, d, inv, max;
2363 int64_t up, down, inc, step_val;
2364 int was_sharp = false;
2365 rtx old_niter;
2366 bool step_is_pow2;
2368 /* The meaning of these assumptions is this:
2369 if !assumptions
2370 then the rest of information does not have to be valid
2371 if noloop_assumptions then the loop does not roll
2372 if infinite then this exit is never used */
2374 desc->assumptions = NULL_RTX;
2375 desc->noloop_assumptions = NULL_RTX;
2376 desc->infinite = NULL_RTX;
2377 desc->simple_p = true;
2379 desc->const_iter = false;
2380 desc->niter_expr = NULL_RTX;
2382 cond = GET_CODE (condition);
2383 gcc_assert (COMPARISON_P (condition));
2385 mode = GET_MODE (XEXP (condition, 0));
2386 if (mode == VOIDmode)
2387 mode = GET_MODE (XEXP (condition, 1));
2388 /* The constant comparisons should be folded. */
2389 gcc_assert (mode != VOIDmode);
2391 /* We only handle integers or pointers. */
2392 if (GET_MODE_CLASS (mode) != MODE_INT
2393 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
2394 goto fail;
2396 op0 = XEXP (condition, 0);
2397 if (!iv_analyze (insn, op0, &iv0))
2398 goto fail;
2399 if (iv0.extend_mode == VOIDmode)
2400 iv0.mode = iv0.extend_mode = mode;
2402 op1 = XEXP (condition, 1);
2403 if (!iv_analyze (insn, op1, &iv1))
2404 goto fail;
2405 if (iv1.extend_mode == VOIDmode)
2406 iv1.mode = iv1.extend_mode = mode;
2408 if (GET_MODE_BITSIZE (iv0.extend_mode) > HOST_BITS_PER_WIDE_INT
2409 || GET_MODE_BITSIZE (iv1.extend_mode) > HOST_BITS_PER_WIDE_INT)
2410 goto fail;
2412 /* Check condition and normalize it. */
2414 switch (cond)
2416 case GE:
2417 case GT:
2418 case GEU:
2419 case GTU:
2420 tmp_iv = iv0; iv0 = iv1; iv1 = tmp_iv;
2421 cond = swap_condition (cond);
2422 break;
2423 case NE:
2424 case LE:
2425 case LEU:
2426 case LT:
2427 case LTU:
2428 break;
2429 default:
2430 goto fail;
2433 /* Handle extends. This is relatively nontrivial, so we only try in some
2434 easy cases, when we can canonicalize the ivs (possibly by adding some
2435 assumptions) to shape subreg (base + i * step). This function also fills
2436 in desc->mode and desc->signed_p. */
2438 if (!canonicalize_iv_subregs (&iv0, &iv1, cond, desc))
2439 goto fail;
2441 comp_mode = iv0.extend_mode;
2442 mode = iv0.mode;
2443 size = GET_MODE_PRECISION (mode);
2444 get_mode_bounds (mode, (cond == LE || cond == LT), comp_mode, &mmin, &mmax);
2445 mode_mmin = lowpart_subreg (mode, mmin, comp_mode);
2446 mode_mmax = lowpart_subreg (mode, mmax, comp_mode);
2448 if (!CONST_INT_P (iv0.step) || !CONST_INT_P (iv1.step))
2449 goto fail;
2451 /* We can take care of the case of two induction variables chasing each other
2452 if the test is NE. I have never seen a loop using it, but still it is
2453 cool. */
2454 if (iv0.step != const0_rtx && iv1.step != const0_rtx)
2456 if (cond != NE)
2457 goto fail;
2459 iv0.step = simplify_gen_binary (MINUS, comp_mode, iv0.step, iv1.step);
2460 iv1.step = const0_rtx;
2463 iv0.step = lowpart_subreg (mode, iv0.step, comp_mode);
2464 iv1.step = lowpart_subreg (mode, iv1.step, comp_mode);
2466 /* This is either an infinite loop or one that ends immediately, depending
2467 on the initial values. Unswitching should remove this kind of condition. */
2468 if (iv0.step == const0_rtx && iv1.step == const0_rtx)
2469 goto fail;
2471 if (cond != NE)
2473 if (iv0.step == const0_rtx)
2474 step_val = -INTVAL (iv1.step);
2475 else
2476 step_val = INTVAL (iv0.step);
2478 /* Ignore loops of while (i-- < 10) type. */
2479 if (step_val < 0)
2480 goto fail;
2482 step_is_pow2 = !(step_val & (step_val - 1));
2484 else
2486 /* We do not care about whether the step is a power of two in this
2487 case. */
2488 step_is_pow2 = false;
2489 step_val = 0;
2492 /* Some more condition normalization. We must record some assumptions
2493 due to overflows. */
2494 switch (cond)
2496 case LT:
2497 case LTU:
2498 /* We want to handle only non-sharp relationals; this is easy, as in
2499 the cases where the overflow would make the transformation unsafe
2500 the loop does not roll. Seemingly it would make more sense to handle
2501 sharp relationals instead, as NE is more similar to
2502 them, but the problem is that here the transformation would be more
2503 difficult due to possibly infinite loops. */
2504 if (iv0.step == const0_rtx)
2506 tmp = lowpart_subreg (mode, iv0.base, comp_mode);
2507 assumption = simplify_gen_relational (EQ, SImode, mode, tmp,
2508 mode_mmax);
2509 if (assumption == const_true_rtx)
2510 goto zero_iter_simplify;
2511 iv0.base = simplify_gen_binary (PLUS, comp_mode,
2512 iv0.base, const1_rtx);
2514 else
2516 tmp = lowpart_subreg (mode, iv1.base, comp_mode);
2517 assumption = simplify_gen_relational (EQ, SImode, mode, tmp,
2518 mode_mmin);
2519 if (assumption == const_true_rtx)
2520 goto zero_iter_simplify;
2521 iv1.base = simplify_gen_binary (PLUS, comp_mode,
2522 iv1.base, constm1_rtx);
2525 if (assumption != const0_rtx)
2526 desc->noloop_assumptions =
2527 alloc_EXPR_LIST (0, assumption, desc->noloop_assumptions);
2528 cond = (cond == LT) ? LE : LEU;
2530 /* It will be useful to be able to tell the difference once more in
2531 the LE -> NE reduction. */
2532 was_sharp = true;
2533 break;
2534 default: ;
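/* For instance (an illustration, not from the original sources): for an
   unsigned comparison i < n with n invariant, the code above records the
   noloop assumption n == 0 (the mode minimum), replaces n by n - 1 and turns
   the condition into i <= n - 1 (LEU), remembering in was_sharp that the
   original relational was sharp.  */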
2537 /* Take care of trivially infinite loops. */
2538 if (cond != NE)
2540 if (iv0.step == const0_rtx)
2542 tmp = lowpart_subreg (mode, iv0.base, comp_mode);
2543 if (rtx_equal_p (tmp, mode_mmin))
2545 desc->infinite =
2546 alloc_EXPR_LIST (0, const_true_rtx, NULL_RTX);
2547 /* Fill in the remaining fields somehow. */
2548 goto zero_iter_simplify;
2551 else
2553 tmp = lowpart_subreg (mode, iv1.base, comp_mode);
2554 if (rtx_equal_p (tmp, mode_mmax))
2556 desc->infinite =
2557 alloc_EXPR_LIST (0, const_true_rtx, NULL_RTX);
2558 /* Fill in the remaining fields somehow. */
2559 goto zero_iter_simplify;
2564 /* If we can, we want to handle NE conditions instead of size
2565 comparisons, as they are much more friendly (most importantly
2566 this takes care of the special handling of loops with step 1). We can
2567 do it if we first check that the upper bound is greater than or equal to
2568 the lower bound, that their difference modulo the step is a constant c,
2569 and that there is no overflow. */
2570 if (cond != NE)
2572 if (iv0.step == const0_rtx)
2573 step = simplify_gen_unary (NEG, comp_mode, iv1.step, comp_mode);
2574 else
2575 step = iv0.step;
2576 step = lowpart_subreg (mode, step, comp_mode);
2577 delta = simplify_gen_binary (MINUS, comp_mode, iv1.base, iv0.base);
2578 delta = lowpart_subreg (mode, delta, comp_mode);
2579 delta = simplify_gen_binary (UMOD, mode, delta, step);
2580 may_xform = const0_rtx;
2581 may_not_xform = const_true_rtx;
2583 if (CONST_INT_P (delta))
2585 if (was_sharp && INTVAL (delta) == INTVAL (step) - 1)
2587 /* A special case. We have transformed a condition of the form
2588 for (i = 0; i < 4; i += 4)
2589 into
2590 for (i = 0; i <= 3; i += 4).
2591 Obviously, if the test for overflow during that transformation
2592 passed, we cannot overflow here. Most importantly, any
2593 loop with a sharp end condition and step 1 falls into this
2594 category, so handling this case specially is definitely
2595 worth the trouble. */
2596 may_xform = const_true_rtx;
2598 else if (iv0.step == const0_rtx)
2600 bound = simplify_gen_binary (PLUS, comp_mode, mmin, step);
2601 bound = simplify_gen_binary (MINUS, comp_mode, bound, delta);
2602 bound = lowpart_subreg (mode, bound, comp_mode);
2603 tmp = lowpart_subreg (mode, iv0.base, comp_mode);
2604 may_xform = simplify_gen_relational (cond, SImode, mode,
2605 bound, tmp);
2606 may_not_xform = simplify_gen_relational (reverse_condition (cond),
2607 SImode, mode,
2608 bound, tmp);
2610 else
2612 bound = simplify_gen_binary (MINUS, comp_mode, mmax, step);
2613 bound = simplify_gen_binary (PLUS, comp_mode, bound, delta);
2614 bound = lowpart_subreg (mode, bound, comp_mode);
2615 tmp = lowpart_subreg (mode, iv1.base, comp_mode);
2616 may_xform = simplify_gen_relational (cond, SImode, mode,
2617 tmp, bound);
2618 may_not_xform = simplify_gen_relational (reverse_condition (cond),
2619 SImode, mode,
2620 tmp, bound);
2624 if (may_xform != const0_rtx)
2626 /* We always perform the transformation, provided that it is not
2627 completely senseless. This is OK, as we would need this assumption
2628 to determine the number of iterations anyway. */
2629 if (may_xform != const_true_rtx)
2631 /* If the step is a power of two and the final value we have
2632 computed overflows, the cycle is infinite. Otherwise it
2633 is nontrivial to compute the number of iterations. */
2634 if (step_is_pow2)
2635 desc->infinite = alloc_EXPR_LIST (0, may_not_xform,
2636 desc->infinite);
2637 else
2638 desc->assumptions = alloc_EXPR_LIST (0, may_xform,
2639 desc->assumptions);
2642 /* We are going to lose some information about the upper bound on the
2643 number of iterations in this step, so record the information
2644 here. */
2645 inc = INTVAL (iv0.step) - INTVAL (iv1.step);
2646 if (CONST_INT_P (iv1.base))
2647 up = INTVAL (iv1.base);
2648 else
2649 up = INTVAL (mode_mmax) - inc;
2650 down = INTVAL (CONST_INT_P (iv0.base)
2651 ? iv0.base
2652 : mode_mmin);
2653 max = (uint64_t) (up - down) / inc + 1;
2654 if (!desc->infinite
2655 && !desc->assumptions)
2656 record_niter_bound (loop, max, false, true);
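/* E.g. (illustrative only) for an unsigned char iv running i = 0, 10, ...
   while i <= 250: inc = 10, down = 0, up = 250, so the recorded bound is
   (250 - 0) / 10 + 1 = 26 iterations.  */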
2658 if (iv0.step == const0_rtx)
2660 iv0.base = simplify_gen_binary (PLUS, comp_mode, iv0.base, delta);
2661 iv0.base = simplify_gen_binary (MINUS, comp_mode, iv0.base, step);
2663 else
2665 iv1.base = simplify_gen_binary (MINUS, comp_mode, iv1.base, delta);
2666 iv1.base = simplify_gen_binary (PLUS, comp_mode, iv1.base, step);
2669 tmp0 = lowpart_subreg (mode, iv0.base, comp_mode);
2670 tmp1 = lowpart_subreg (mode, iv1.base, comp_mode);
2671 assumption = simplify_gen_relational (reverse_condition (cond),
2672 SImode, mode, tmp0, tmp1);
2673 if (assumption == const_true_rtx)
2674 goto zero_iter_simplify;
2675 else if (assumption != const0_rtx)
2676 desc->noloop_assumptions =
2677 alloc_EXPR_LIST (0, assumption, desc->noloop_assumptions);
2678 cond = NE;
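/* A worked example of this LE -> NE reduction (illustrative only): for
   i = 0; i <= 10; i += 4 we get delta = (10 - 0) % 4 = 2, the bound is
   rewritten to 10 - 2 + 4 = 12, the noloop test 0 > 12 folds to false,
   and the condition becomes i != 12, giving (12 - 0) / 4 = 3 iterations,
   matching the executions at i = 0, 4 and 8.  */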
2682 /* Count the number of iterations. */
2683 if (cond == NE)
2685 /* Everything we do here is just arithmetic modulo the size of the mode. This
2686 allows us to do more involved computations of the number of iterations
2687 than in other cases. First transform the condition into the shape
2688 s * i <> c, with s positive. */
2689 iv1.base = simplify_gen_binary (MINUS, comp_mode, iv1.base, iv0.base);
2690 iv0.base = const0_rtx;
2691 iv0.step = simplify_gen_binary (MINUS, comp_mode, iv0.step, iv1.step);
2692 iv1.step = const0_rtx;
2693 if (INTVAL (iv0.step) < 0)
2695 iv0.step = simplify_gen_unary (NEG, comp_mode, iv0.step, comp_mode);
2696 iv1.base = simplify_gen_unary (NEG, comp_mode, iv1.base, comp_mode);
2698 iv0.step = lowpart_subreg (mode, iv0.step, comp_mode);
2700 /* Let gcd (s, size of mode) = d. If d does not divide c, the loop
2701 is infinite. Otherwise, the number of iterations is
2702 (inverse(s/d) * (c/d)) mod (size of mode/d). */
2703 s = INTVAL (iv0.step); d = 1;
2704 while (s % 2 != 1)
2706 s /= 2;
2707 d *= 2;
2708 size--;
2710 bound = GEN_INT (((uint64_t) 1 << (size - 1) << 1) - 1);
2712 tmp1 = lowpart_subreg (mode, iv1.base, comp_mode);
2713 tmp = simplify_gen_binary (UMOD, mode, tmp1, gen_int_mode (d, mode));
2714 assumption = simplify_gen_relational (NE, SImode, mode, tmp, const0_rtx);
2715 desc->infinite = alloc_EXPR_LIST (0, assumption, desc->infinite);
2717 tmp = simplify_gen_binary (UDIV, mode, tmp1, gen_int_mode (d, mode));
2718 inv = inverse (s, size);
2719 tmp = simplify_gen_binary (MULT, mode, tmp, gen_int_mode (inv, mode));
2720 desc->niter_expr = simplify_gen_binary (AND, mode, tmp, bound);
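/* A worked instance of the formula above (illustrative only): in an 8-bit
   mode with s = 6 and c = 10 we get d = 2, s becomes 3, size becomes 7 and
   bound = 127.  10 % 2 == 0, so the infiniteness assumption folds away;
   inverse (3, 7) = 43 because 3 * 43 = 129 == 1 (mod 128), and the number of
   iterations is (10 / 2) * 43 & 127 = 215 & 127 = 87 -- indeed
   6 * 87 = 522 == 10 (mod 256).  */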
2722 else
2724 if (iv1.step == const0_rtx)
2725 /* Condition is of the shape a + s * i <= b.
2726 We must know that b + s does not overflow and a <= b + s, and then we
2727 can compute the number of iterations as (b + s - a) / s. (It might
2728 seem that we could in fact be more clever about testing the b + s
2729 overflow condition using some information about b - a mod s,
2730 but it was already taken into account during the LE -> NE transform). */
2732 step = iv0.step;
2733 tmp0 = lowpart_subreg (mode, iv0.base, comp_mode);
2734 tmp1 = lowpart_subreg (mode, iv1.base, comp_mode);
2736 bound = simplify_gen_binary (MINUS, mode, mode_mmax,
2737 lowpart_subreg (mode, step,
2738 comp_mode));
2739 if (step_is_pow2)
2741 rtx t0, t1;
2743 /* If s is a power of 2, we know that the loop is infinite if
2744 a % s <= b % s and b + s overflows. */
2745 assumption = simplify_gen_relational (reverse_condition (cond),
2746 SImode, mode,
2747 tmp1, bound);
2749 t0 = simplify_gen_binary (UMOD, mode, copy_rtx (tmp0), step);
2750 t1 = simplify_gen_binary (UMOD, mode, copy_rtx (tmp1), step);
2751 tmp = simplify_gen_relational (cond, SImode, mode, t0, t1);
2752 assumption = simplify_gen_binary (AND, SImode, assumption, tmp);
2753 desc->infinite =
2754 alloc_EXPR_LIST (0, assumption, desc->infinite);
2756 else
2758 assumption = simplify_gen_relational (cond, SImode, mode,
2759 tmp1, bound);
2760 desc->assumptions =
2761 alloc_EXPR_LIST (0, assumption, desc->assumptions);
2764 tmp = simplify_gen_binary (PLUS, comp_mode, iv1.base, iv0.step);
2765 tmp = lowpart_subreg (mode, tmp, comp_mode);
2766 assumption = simplify_gen_relational (reverse_condition (cond),
2767 SImode, mode, tmp0, tmp);
2769 delta = simplify_gen_binary (PLUS, mode, tmp1, step);
2770 delta = simplify_gen_binary (MINUS, mode, delta, tmp0);
2772 else
2774 /* Condition is of the shape a <= b - s * i.
2775 We must know that a - s does not overflow and a - s <= b, and then
2776 we can again compute the number of iterations as (b - (a - s)) / s. */
2777 step = simplify_gen_unary (NEG, mode, iv1.step, mode);
2778 tmp0 = lowpart_subreg (mode, iv0.base, comp_mode);
2779 tmp1 = lowpart_subreg (mode, iv1.base, comp_mode);
2781 bound = simplify_gen_binary (PLUS, mode, mode_mmin,
2782 lowpart_subreg (mode, step, comp_mode));
2783 if (step_is_pow2)
2785 rtx t0, t1;
2787 /* If s is a power of 2, we know that the loop is infinite if
2788 a % s <= b % s and a - s overflows. */
2789 assumption = simplify_gen_relational (reverse_condition (cond),
2790 SImode, mode,
2791 bound, tmp0);
2793 t0 = simplify_gen_binary (UMOD, mode, copy_rtx (tmp0), step);
2794 t1 = simplify_gen_binary (UMOD, mode, copy_rtx (tmp1), step);
2795 tmp = simplify_gen_relational (cond, SImode, mode, t0, t1);
2796 assumption = simplify_gen_binary (AND, SImode, assumption, tmp);
2797 desc->infinite =
2798 alloc_EXPR_LIST (0, assumption, desc->infinite);
2800 else
2802 assumption = simplify_gen_relational (cond, SImode, mode,
2803 bound, tmp0);
2804 desc->assumptions =
2805 alloc_EXPR_LIST (0, assumption, desc->assumptions);
2808 tmp = simplify_gen_binary (PLUS, comp_mode, iv0.base, iv1.step);
2809 tmp = lowpart_subreg (mode, tmp, comp_mode);
2810 assumption = simplify_gen_relational (reverse_condition (cond),
2811 SImode, mode,
2812 tmp, tmp1);
2813 delta = simplify_gen_binary (MINUS, mode, tmp0, step);
2814 delta = simplify_gen_binary (MINUS, mode, tmp1, delta);
2816 if (assumption == const_true_rtx)
2817 goto zero_iter_simplify;
2818 else if (assumption != const0_rtx)
2819 desc->noloop_assumptions =
2820 alloc_EXPR_LIST (0, assumption, desc->noloop_assumptions);
2821 delta = simplify_gen_binary (UDIV, mode, delta, step);
2822 desc->niter_expr = delta;
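/* A worked example of the two shapes above (illustrative only): for
   3 + 5 * i <= 27 in an unsigned 8-bit mode, the recorded assumption is
   27 <= 255 - 5, the noloop test 3 > 27 + 5 folds to false, and the count
   is (27 + 5 - 3) / 5 = 5, matching the executions at i = 0 .. 4.  The
   mirrored a <= b - s * i case computes (b - (a - s)) / s the same way.  */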
2825 old_niter = desc->niter_expr;
2827 simplify_using_initial_values (loop, AND, &desc->assumptions);
2828 if (desc->assumptions
2829 && XEXP (desc->assumptions, 0) == const0_rtx)
2830 goto fail;
2831 simplify_using_initial_values (loop, IOR, &desc->noloop_assumptions);
2832 simplify_using_initial_values (loop, IOR, &desc->infinite);
2833 simplify_using_initial_values (loop, UNKNOWN, &desc->niter_expr);
2835 /* Rerun the simplification. Consider code (created by copying loop headers)
2837 i = 0;
2839 if (0 < n)
2843   do i++;
2844   while (i < n);
2847 The first pass determines that i = 0, the second pass uses it to eliminate
2848 the noloop assumption. */
2850 simplify_using_initial_values (loop, AND, &desc->assumptions);
2851 if (desc->assumptions
2852 && XEXP (desc->assumptions, 0) == const0_rtx)
2853 goto fail;
2854 simplify_using_initial_values (loop, IOR, &desc->noloop_assumptions);
2855 simplify_using_initial_values (loop, IOR, &desc->infinite);
2856 simplify_using_initial_values (loop, UNKNOWN, &desc->niter_expr);
2858 if (desc->noloop_assumptions
2859 && XEXP (desc->noloop_assumptions, 0) == const_true_rtx)
2860 goto zero_iter;
2862 if (CONST_INT_P (desc->niter_expr))
2864 uint64_t val = INTVAL (desc->niter_expr);
2866 desc->const_iter = true;
2867 desc->niter = val & GET_MODE_MASK (desc->mode);
2868 if (!desc->infinite
2869 && !desc->assumptions)
2870 record_niter_bound (loop, desc->niter, false, true);
2872 else
2874 max = determine_max_iter (loop, desc, old_niter);
2875 if (!max)
2876 goto zero_iter_simplify;
2877 if (!desc->infinite
2878 && !desc->assumptions)
2879 record_niter_bound (loop, max, false, true);
2881 /* simplify_using_initial_values does a copy propagation on the registers
2882 in the expression for the number of iterations. This prolongs life
2883 ranges of registers and increases register pressure, and usually
2884 brings no gain (and if it happens to do, the cse pass will take care
2885 of it anyway). So prevent this behavior, unless it enabled us to
2886 derive that the number of iterations is a constant. */
2887 desc->niter_expr = old_niter;
2890 return;
2892 zero_iter_simplify:
2893 /* Simplify the assumptions. */
2894 simplify_using_initial_values (loop, AND, &desc->assumptions);
2895 if (desc->assumptions
2896 && XEXP (desc->assumptions, 0) == const0_rtx)
2897 goto fail;
2898 simplify_using_initial_values (loop, IOR, &desc->infinite);
2900 /* Fallthru. */
2901 zero_iter:
2902 desc->const_iter = true;
2903 desc->niter = 0;
2904 record_niter_bound (loop, 0, true, true);
2905 desc->noloop_assumptions = NULL_RTX;
2906 desc->niter_expr = const0_rtx;
2907 return;
2909 fail:
2910 desc->simple_p = false;
2911 return;
2914 /* Checks whether E is a simple exit from LOOP and stores its description
2915 into DESC. */
2917 static void
2918 check_simple_exit (struct loop *loop, edge e, struct niter_desc *desc)
2920 basic_block exit_bb;
2921 rtx condition;
2922 rtx_insn *at;
2923 edge ein;
2925 exit_bb = e->src;
2926 desc->simple_p = false;
2928 /* It must belong directly to the loop. */
2929 if (exit_bb->loop_father != loop)
2930 return;
2932 /* It must be tested (at least) once during any iteration. */
2933 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit_bb))
2934 return;
2936 /* It must end in a simple conditional jump. */
2937 if (!any_condjump_p (BB_END (exit_bb)))
2938 return;
2940 ein = EDGE_SUCC (exit_bb, 0);
2941 if (ein == e)
2942 ein = EDGE_SUCC (exit_bb, 1);
2944 desc->out_edge = e;
2945 desc->in_edge = ein;
2947 /* Test whether the condition is suitable. */
2948 if (!(condition = get_condition (BB_END (ein->src), &at, false, false)))
2949 return;
2951 if (ein->flags & EDGE_FALLTHRU)
2953 condition = reversed_condition (condition);
2954 if (!condition)
2955 return;
2958 /* Check that we are able to determine number of iterations and fill
2959 in information about it. */
2960 iv_number_of_iterations (loop, at, condition, desc);
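/* For example (an illustration): if the exit block ends in a jump taken on
   i >= n and the in-loop edge EIN is the fallthrough edge, the taken branch
   leaves the loop, so the condition is reversed to i < n -- the condition
   under which the loop keeps iterating -- before being passed to
   iv_number_of_iterations above.  */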
2963 /* Finds a simple exit of LOOP and stores its description into DESC. */
2965 void
2966 find_simple_exit (struct loop *loop, struct niter_desc *desc)
2968 unsigned i;
2969 basic_block *body;
2970 edge e;
2971 struct niter_desc act;
2972 bool any = false;
2973 edge_iterator ei;
2975 desc->simple_p = false;
2976 body = get_loop_body (loop);
2978 for (i = 0; i < loop->num_nodes; i++)
2980 FOR_EACH_EDGE (e, ei, body[i]->succs)
2982 if (flow_bb_inside_loop_p (loop, e->dest))
2983 continue;
2985 check_simple_exit (loop, e, &act);
2986 if (!act.simple_p)
2987 continue;
2989 if (!any)
2990 any = true;
2991 else
2993 /* Prefer constant iterations; the fewer the better. */
2994 if (!act.const_iter
2995 || (desc->const_iter && act.niter >= desc->niter))
2996 continue;
2998 /* Also, if the current exit may be infinite while the old one may
2999 not be, prefer the old one. */
3000 if (act.infinite && !desc->infinite)
3001 continue;
3004 *desc = act;
3008 if (dump_file)
3010 if (desc->simple_p)
3012 fprintf (dump_file, "Loop %d is simple:\n", loop->num);
3013 fprintf (dump_file, " simple exit %d -> %d\n",
3014 desc->out_edge->src->index,
3015 desc->out_edge->dest->index);
3016 if (desc->assumptions)
3018 fprintf (dump_file, " assumptions: ");
3019 print_rtl (dump_file, desc->assumptions);
3020 fprintf (dump_file, "\n");
3022 if (desc->noloop_assumptions)
3024 fprintf (dump_file, " does not roll if: ");
3025 print_rtl (dump_file, desc->noloop_assumptions);
3026 fprintf (dump_file, "\n");
3028 if (desc->infinite)
3030 fprintf (dump_file, " infinite if: ");
3031 print_rtl (dump_file, desc->infinite);
3032 fprintf (dump_file, "\n");
3035 fprintf (dump_file, " number of iterations: ");
3036 print_rtl (dump_file, desc->niter_expr);
3037 fprintf (dump_file, "\n");
3039 fprintf (dump_file, " upper bound: %li\n",
3040 (long)get_max_loop_iterations_int (loop));
3041 fprintf (dump_file, " realistic bound: %li\n",
3042 (long)get_estimated_loop_iterations_int (loop));
3044 else
3045 fprintf (dump_file, "Loop %d is not simple.\n", loop->num);
3048 free (body);
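/* For example (an illustration): if one exit has a known constant count of
   10 and another only a symbolic count, the constant exit is kept; between
   two constant exits the smaller count wins, and an exit that may be
   infinite never replaces one that cannot be.  */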
3051 /* Creates a simple loop description of LOOP if it was not computed
3052 already. */
3054 struct niter_desc *
3055 get_simple_loop_desc (struct loop *loop)
3057 struct niter_desc *desc = simple_loop_desc (loop);
3059 if (desc)
3060 return desc;
3062 /* At least desc->infinite is not always initialized by
3063 find_simple_exit. */
3064 desc = ggc_cleared_alloc<niter_desc> ();
3065 iv_analysis_loop_init (loop);
3066 find_simple_exit (loop, desc);
3067 loop->simple_loop_desc = desc;
3069 if (desc->simple_p && (desc->assumptions || desc->infinite))
3071 const char *wording;
3073 /* Assume that no overflow happens and that the loop is finite.
3074 We already warned at the tree level if we ran optimizations there. */
3075 if (!flag_tree_loop_optimize && warn_unsafe_loop_optimizations)
3077 if (desc->infinite)
3079 wording =
3080 flag_unsafe_loop_optimizations
3081 ? N_("assuming that the loop is not infinite")
3082 : N_("cannot optimize possibly infinite loops");
3083 warning (OPT_Wunsafe_loop_optimizations, "%s",
3084 gettext (wording));
3086 if (desc->assumptions)
3088 wording =
3089 flag_unsafe_loop_optimizations
3090 ? N_("assuming that the loop counter does not overflow")
3091 : N_("cannot optimize loop, the loop counter may overflow");
3092 warning (OPT_Wunsafe_loop_optimizations, "%s",
3093 gettext (wording));
3097 if (flag_unsafe_loop_optimizations)
3099 desc->assumptions = NULL_RTX;
3100 desc->infinite = NULL_RTX;
3104 return desc;
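/* A minimal usage sketch (illustrative only, not part of this file):

     struct niter_desc *desc = get_simple_loop_desc (loop);
     if (desc->simple_p && desc->const_iter)
       {
         uint64_t n = desc->niter;
         ...
       }

   Callers that want the count to hold unconditionally should also check that
   desc->assumptions and desc->infinite are empty, as iv_number_of_iterations
   does above before calling record_niter_bound.  */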
3107 /* Releases simple loop description for LOOP. */
3109 void
3110 free_simple_loop_desc (struct loop *loop)
3112 struct niter_desc *desc = simple_loop_desc (loop);
3114 if (!desc)
3115 return;
3117 ggc_free (desc);
3118 loop->simple_loop_desc = NULL;