gcc/tree-ssa-loop-ivopts.c
1 /* Induction variable optimizations.
2 Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 /* This pass tries to find the optimal set of induction variables for the loop.
22 It optimizes just the basic linear induction variables (although adding
23 support for other types should not be too hard). It includes the
24 optimizations commonly known as strength reduction, induction variable
25 coalescing and induction variable elimination. It does so in the
26 following steps:
28 1) The interesting uses of induction variables are found. This includes
30 -- uses of induction variables in non-linear expressions
31 -- addresses of arrays
32 -- comparisons of induction variables
34 2) Candidates for the induction variables are found. This includes
36 -- old induction variables
37 -- the variables defined by expressions derived from the "interesting
38 uses" above
40 3) The optimal (w.r.t. a cost function) set of variables is chosen. The
41 cost function assigns a cost to sets of induction variables and consists
42 of three parts:
44 -- The use costs. Each of the interesting uses chooses the best induction
45 variable in the set and adds its cost to the sum. The cost reflects
46 the time spent on modifying the induction variable's value to be usable
47 for the given purpose (adding base and offset for arrays, etc.).
48 -- The variable costs. Each of the variables has a cost assigned that
49 reflects the costs associated with incrementing the value of the
50 variable. The original variables are somewhat preferred.
51 -- The set cost. Depending on the size of the set, extra cost may be
52 added to reflect register pressure.
54 All the costs are defined in a machine-specific way, using the target
55 hooks and machine descriptions to determine them.
57 4) The trees are transformed to use the new variables, the dead code is
58 removed.
60 All of this is done loop by loop. Doing it globally is theoretically
61 possible, it might give better performance and it might enable us
62 to decide costs more precisely, but getting all the interactions right
63 would be complicated. */
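/* As an illustrative sketch (the precise outcome depends on the target
   cost model), strength reduction may rewrite a loop such as

     for (i = 0; i < n; i++)
       a[i] = 0;

   into the equivalent of

     for (p = &a[0]; p != &a[n]; p++)
       *p = 0;

   replacing the scaled addressing implied by a[i] with a simple pointer
   increment, and eliminating i altogether when the comparison against N
   can be expressed in terms of P.  */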
65 #include "config.h"
66 #include "system.h"
67 #include "coretypes.h"
68 #include "tm.h"
69 #include "tree.h"
70 #include "tm_p.h"
71 #include "basic-block.h"
72 #include "output.h"
73 #include "tree-pretty-print.h"
74 #include "gimple-pretty-print.h"
75 #include "tree-flow.h"
76 #include "tree-dump.h"
77 #include "timevar.h"
78 #include "cfgloop.h"
79 #include "tree-pass.h"
80 #include "ggc.h"
81 #include "insn-config.h"
82 #include "recog.h"
83 #include "pointer-set.h"
84 #include "hashtab.h"
85 #include "tree-chrec.h"
86 #include "tree-scalar-evolution.h"
87 #include "cfgloop.h"
88 #include "params.h"
89 #include "langhooks.h"
90 #include "tree-affine.h"
91 #include "target.h"
92 #include "tree-ssa-propagate.h"
94 /* FIXME: Expressions are expanded to RTL in this pass to determine the
95 cost of different addressing modes. This should be moved to a TBD
96 interface between the GIMPLE and RTL worlds. */
97 #include "expr.h"
99 /* The infinite cost. */
100 #define INFTY 10000000
102 /* The expected number of loop iterations. TODO -- use profiling instead of
103 this. */
104 #define AVG_LOOP_NITER(LOOP) 5
107 /* Representation of the induction variable. */
108 struct iv
110 tree base; /* Initial value of the iv. */
111 tree base_object; /* A memory object to which the induction variable points. */
112 tree step; /* Step of the iv (constant only). */
113 tree ssa_name; /* The ssa name with the value. */
114 bool biv_p; /* Is it a biv? */
115 bool have_use_for; /* Do we already have a use for it? */
116 unsigned use_id; /* The identifier of the use, if have_use_for is set. */
119 /* Per-ssa version information (induction variable descriptions, etc.). */
120 struct version_info
122 tree name; /* The ssa name. */
123 struct iv *iv; /* Induction variable description. */
124 bool has_nonlin_use; /* For a loop-level invariant, whether it is used in
125 an expression that is not an induction variable. */
126 bool preserve_biv; /* For the original biv, whether to preserve it. */
127 unsigned inv_id; /* Id of an invariant. */
130 /* Types of uses. */
131 enum use_type
133 USE_NONLINEAR_EXPR, /* Use in a nonlinear expression. */
134 USE_ADDRESS, /* Use in an address. */
135 USE_COMPARE /* Use is a compare. */
138 /* Cost of a computation. */
139 typedef struct
141 int cost; /* The runtime cost. */
142 unsigned complexity; /* The estimate of the complexity of the code for
143 the computation (in no concrete units --
144 complexity field should be larger for more
145 complex expressions and addressing modes). */
146 } comp_cost;
148 static const comp_cost zero_cost = {0, 0};
149 static const comp_cost infinite_cost = {INFTY, INFTY};
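/* A minimal sketch of how these pairs are meant to combine and compare
   (the pass defines helpers along these lines further below; the name
   here is illustrative only):

     static inline comp_cost
     example_add_costs (comp_cost a, comp_cost b)
     {
       a.cost += b.cost;
       a.complexity += b.complexity;
       return a;
     }

   The runtime cost is compared first; equal runtime costs are ordered by
   the complexity field, so {4, 1} is preferred over {4, 2}.  */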
151 /* The candidate-cost pair. */
152 struct cost_pair
154 struct iv_cand *cand; /* The candidate. */
155 comp_cost cost; /* The cost. */
156 bitmap depends_on; /* The list of invariants that have to be
157 preserved. */
158 tree value; /* For final value elimination, the expression for
159 the final value of the iv. For iv elimination,
160 the new bound to compare with. */
163 /* A use of an induction variable. */
164 struct iv_use
166 unsigned id; /* The id of the use. */
167 enum use_type type; /* Type of the use. */
168 struct iv *iv; /* The induction variable it is based on. */
169 gimple stmt; /* Statement in which it occurs. */
170 tree *op_p; /* The place where it occurs. */
171 bitmap related_cands; /* The set of "related" iv candidates, plus the common
172 important ones. */
174 unsigned n_map_members; /* Number of candidates in the cost_map list. */
175 struct cost_pair *cost_map;
176 /* The costs w.r.t. the iv candidates. */
178 struct iv_cand *selected;
179 /* The selected candidate. */
182 /* The position where the iv is computed. */
183 enum iv_position
185 IP_NORMAL, /* At the end, just before the exit condition. */
186 IP_END, /* At the end of the latch block. */
187 IP_BEFORE_USE, /* Immediately before a specific use. */
188 IP_AFTER_USE, /* Immediately after a specific use. */
189 IP_ORIGINAL /* The original biv. */
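/* A sketch of where each alternative places the increment in a typical
   loop (a single position is chosen per candidate):

     body:
       ...
       use (iv);             IP_BEFORE_USE / IP_AFTER_USE: immediately
       ...                   around this particular use
       iv += step;           IP_NORMAL: just before the exit test
       if (cond) goto body; else goto exit;
     latch:
       iv += step;           IP_END: at the end of the latch block

   IP_ORIGINAL keeps the increment of the original biv where it already
   is.  */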
192 /* The induction variable candidate. */
193 struct iv_cand
195 unsigned id; /* The number of the candidate. */
196 bool important; /* Whether this is an "important" candidate, i.e. such
197 that it should be considered by all uses. */
198 ENUM_BITFIELD(iv_position) pos : 8; /* Where it is computed. */
199 gimple incremented_at;/* For original biv, the statement where it is
200 incremented. */
201 tree var_before; /* The variable used for it before increment. */
202 tree var_after; /* The variable used for it after increment. */
203 struct iv *iv; /* The value of the candidate. NULL for
204 "pseudocandidate" used to indicate the possibility
205 to replace the final value of an iv by direct
206 computation of the value. */
207 unsigned cost; /* Cost of the candidate. */
208 unsigned cost_step; /* Cost of the candidate's increment operation. */
209 struct iv_use *ainc_use; /* For IP_{BEFORE,AFTER}_USE candidates, the place
210 where it is incremented. */
211 bitmap depends_on; /* The list of invariants that are used in step of the
212 biv. */
215 /* The data used by the induction variable optimizations. */
217 typedef struct iv_use *iv_use_p;
218 DEF_VEC_P(iv_use_p);
219 DEF_VEC_ALLOC_P(iv_use_p,heap);
221 typedef struct iv_cand *iv_cand_p;
222 DEF_VEC_P(iv_cand_p);
223 DEF_VEC_ALLOC_P(iv_cand_p,heap);
225 struct ivopts_data
227 /* The currently optimized loop. */
228 struct loop *current_loop;
230 /* Numbers of iterations for all exits of the current loop. */
231 struct pointer_map_t *niters;
233 /* Number of registers used in the loop. */
234 unsigned regs_used;
236 /* The size of version_info array allocated. */
237 unsigned version_info_size;
239 /* The array of information for the ssa names. */
240 struct version_info *version_info;
242 /* The bitmap of indices in version_info whose value was changed. */
243 bitmap relevant;
245 /* The uses of induction variables. */
246 VEC(iv_use_p,heap) *iv_uses;
248 /* The candidates. */
249 VEC(iv_cand_p,heap) *iv_candidates;
251 /* A bitmap of important candidates. */
252 bitmap important_candidates;
254 /* The maximum invariant id. */
255 unsigned max_inv_id;
257 /* Whether to consider just related and important candidates when replacing a
258 use. */
259 bool consider_all_candidates;
261 /* Are we optimizing for speed? */
262 bool speed;
264 /* Whether the loop body includes any function calls. */
265 bool body_includes_call;
268 /* An assignment of iv candidates to uses. */
270 struct iv_ca
272 /* The number of uses covered by the assignment. */
273 unsigned upto;
275 /* Number of uses that cannot be expressed by the candidates in the set. */
276 unsigned bad_uses;
278 /* Candidate assigned to a use, together with the related costs. */
279 struct cost_pair **cand_for_use;
281 /* Number of times each candidate is used. */
282 unsigned *n_cand_uses;
284 /* The candidates used. */
285 bitmap cands;
287 /* The number of candidates in the set. */
288 unsigned n_cands;
290 /* Total number of registers needed. */
291 unsigned n_regs;
293 /* Total cost of expressing uses. */
294 comp_cost cand_use_cost;
296 /* Total cost of candidates. */
297 unsigned cand_cost;
299 /* Number of times each invariant is used. */
300 unsigned *n_invariant_uses;
302 /* Total cost of the assignment. */
303 comp_cost cost;
306 /* Difference of two iv candidate assignments. */
308 struct iv_ca_delta
310 /* Changed use. */
311 struct iv_use *use;
313 /* An old assignment (for rollback purposes). */
314 struct cost_pair *old_cp;
316 /* A new assignment. */
317 struct cost_pair *new_cp;
319 /* Next change in the list. */
320 struct iv_ca_delta *next_change;
323 /* Bound on number of candidates below which all candidates are considered. */
325 #define CONSIDER_ALL_CANDIDATES_BOUND \
326 ((unsigned) PARAM_VALUE (PARAM_IV_CONSIDER_ALL_CANDIDATES_BOUND))
328 /* If there are more iv occurrences, we just give up (it is quite unlikely that
329 optimizing such a loop would help, and it would take ages). */
331 #define MAX_CONSIDERED_USES \
332 ((unsigned) PARAM_VALUE (PARAM_IV_MAX_CONSIDERED_USES))
334 /* If there are at most this number of ivs in the set, always try removing
335 unnecessary ivs from the set. */
337 #define ALWAYS_PRUNE_CAND_SET_BOUND \
338 ((unsigned) PARAM_VALUE (PARAM_IV_ALWAYS_PRUNE_CAND_SET_BOUND))
340 /* The list of trees for which the decl_rtl field must be reset is stored
341 here. */
343 static VEC(tree,heap) *decl_rtl_to_reset;
345 /* Number of uses recorded in DATA. */
347 static inline unsigned
348 n_iv_uses (struct ivopts_data *data)
350 return VEC_length (iv_use_p, data->iv_uses);
353 /* Ith use recorded in DATA. */
355 static inline struct iv_use *
356 iv_use (struct ivopts_data *data, unsigned i)
358 return VEC_index (iv_use_p, data->iv_uses, i);
361 /* Number of candidates recorded in DATA. */
363 static inline unsigned
364 n_iv_cands (struct ivopts_data *data)
366 return VEC_length (iv_cand_p, data->iv_candidates);
369 /* Ith candidate recorded in DATA. */
371 static inline struct iv_cand *
372 iv_cand (struct ivopts_data *data, unsigned i)
374 return VEC_index (iv_cand_p, data->iv_candidates, i);
377 /* The single loop exit if it dominates the latch, NULL otherwise. */
379 edge
380 single_dom_exit (struct loop *loop)
382 edge exit = single_exit (loop);
384 if (!exit)
385 return NULL;
387 if (!just_once_each_iteration_p (loop, exit->src))
388 return NULL;
390 return exit;
393 /* Dumps information about the induction variable IV to FILE. */
395 extern void dump_iv (FILE *, struct iv *);
396 void
397 dump_iv (FILE *file, struct iv *iv)
399 if (iv->ssa_name)
401 fprintf (file, "ssa name ");
402 print_generic_expr (file, iv->ssa_name, TDF_SLIM);
403 fprintf (file, "\n");
406 fprintf (file, " type ");
407 print_generic_expr (file, TREE_TYPE (iv->base), TDF_SLIM);
408 fprintf (file, "\n");
410 if (iv->step)
412 fprintf (file, " base ");
413 print_generic_expr (file, iv->base, TDF_SLIM);
414 fprintf (file, "\n");
416 fprintf (file, " step ");
417 print_generic_expr (file, iv->step, TDF_SLIM);
418 fprintf (file, "\n");
420 else
422 fprintf (file, " invariant ");
423 print_generic_expr (file, iv->base, TDF_SLIM);
424 fprintf (file, "\n");
427 if (iv->base_object)
429 fprintf (file, " base object ");
430 print_generic_expr (file, iv->base_object, TDF_SLIM);
431 fprintf (file, "\n");
434 if (iv->biv_p)
435 fprintf (file, " is a biv\n");
438 /* Dumps information about the USE to FILE. */
440 extern void dump_use (FILE *, struct iv_use *);
441 void
442 dump_use (FILE *file, struct iv_use *use)
444 fprintf (file, "use %d\n", use->id);
446 switch (use->type)
448 case USE_NONLINEAR_EXPR:
449 fprintf (file, " generic\n");
450 break;
452 case USE_ADDRESS:
453 fprintf (file, " address\n");
454 break;
456 case USE_COMPARE:
457 fprintf (file, " compare\n");
458 break;
460 default:
461 gcc_unreachable ();
464 fprintf (file, " in statement ");
465 print_gimple_stmt (file, use->stmt, 0, 0);
466 fprintf (file, "\n");
468 fprintf (file, " at position ");
469 if (use->op_p)
470 print_generic_expr (file, *use->op_p, TDF_SLIM);
471 fprintf (file, "\n");
473 dump_iv (file, use->iv);
475 if (use->related_cands)
477 fprintf (file, " related candidates ");
478 dump_bitmap (file, use->related_cands);
482 /* Dumps information about the uses to FILE. */
484 extern void dump_uses (FILE *, struct ivopts_data *);
485 void
486 dump_uses (FILE *file, struct ivopts_data *data)
488 unsigned i;
489 struct iv_use *use;
491 for (i = 0; i < n_iv_uses (data); i++)
493 use = iv_use (data, i);
495 dump_use (file, use);
496 fprintf (file, "\n");
500 /* Dumps information about induction variable candidate CAND to FILE. */
502 extern void dump_cand (FILE *, struct iv_cand *);
503 void
504 dump_cand (FILE *file, struct iv_cand *cand)
506 struct iv *iv = cand->iv;
508 fprintf (file, "candidate %d%s\n",
509 cand->id, cand->important ? " (important)" : "");
511 if (cand->depends_on)
513 fprintf (file, " depends on ");
514 dump_bitmap (file, cand->depends_on);
517 if (!iv)
519 fprintf (file, " final value replacement\n");
520 return;
523 switch (cand->pos)
525 case IP_NORMAL:
526 fprintf (file, " incremented before exit test\n");
527 break;
529 case IP_BEFORE_USE:
530 fprintf (file, " incremented before use %d\n", cand->ainc_use->id);
531 break;
533 case IP_AFTER_USE:
534 fprintf (file, " incremented after use %d\n", cand->ainc_use->id);
535 break;
537 case IP_END:
538 fprintf (file, " incremented at end\n");
539 break;
541 case IP_ORIGINAL:
542 fprintf (file, " original biv\n");
543 break;
546 dump_iv (file, iv);
549 /* Returns the info for ssa version VER. */
551 static inline struct version_info *
552 ver_info (struct ivopts_data *data, unsigned ver)
554 return data->version_info + ver;
557 /* Returns the info for ssa name NAME. */
559 static inline struct version_info *
560 name_info (struct ivopts_data *data, tree name)
562 return ver_info (data, SSA_NAME_VERSION (name));
565 /* Returns true if STMT is after the place where the IP_NORMAL ivs will be
566 emitted in LOOP. */
568 static bool
569 stmt_after_ip_normal_pos (struct loop *loop, gimple stmt)
571 basic_block bb = ip_normal_pos (loop), sbb = gimple_bb (stmt);
573 gcc_assert (bb);
575 if (sbb == loop->latch)
576 return true;
578 if (sbb != bb)
579 return false;
581 return stmt == last_stmt (bb);
584 /* Returns true if STMT is after the place where the original induction
585 variable CAND is incremented. If TRUE_IF_EQUAL is set, we return true
586 if the positions are identical. */
588 static bool
589 stmt_after_inc_pos (struct iv_cand *cand, gimple stmt, bool true_if_equal)
591 basic_block cand_bb = gimple_bb (cand->incremented_at);
592 basic_block stmt_bb = gimple_bb (stmt);
594 if (!dominated_by_p (CDI_DOMINATORS, stmt_bb, cand_bb))
595 return false;
597 if (stmt_bb != cand_bb)
598 return true;
600 if (true_if_equal
601 && gimple_uid (stmt) == gimple_uid (cand->incremented_at))
602 return true;
603 return gimple_uid (stmt) > gimple_uid (cand->incremented_at);
606 /* Returns true if STMT is after the place where the induction variable
607 CAND is incremented in LOOP. */
609 static bool
610 stmt_after_increment (struct loop *loop, struct iv_cand *cand, gimple stmt)
612 switch (cand->pos)
614 case IP_END:
615 return false;
617 case IP_NORMAL:
618 return stmt_after_ip_normal_pos (loop, stmt);
620 case IP_ORIGINAL:
621 case IP_AFTER_USE:
622 return stmt_after_inc_pos (cand, stmt, false);
624 case IP_BEFORE_USE:
625 return stmt_after_inc_pos (cand, stmt, true);
627 default:
628 gcc_unreachable ();
632 /* Returns true if EXP is an ssa name that occurs in an abnormal phi node. */
634 static bool
635 abnormal_ssa_name_p (tree exp)
637 if (!exp)
638 return false;
640 if (TREE_CODE (exp) != SSA_NAME)
641 return false;
643 return SSA_NAME_OCCURS_IN_ABNORMAL_PHI (exp) != 0;
646 /* Returns false if BASE or INDEX contains an ssa name that occurs in an
647 abnormal phi node. Callback for for_each_index. */
649 static bool
650 idx_contains_abnormal_ssa_name_p (tree base, tree *index,
651 void *data ATTRIBUTE_UNUSED)
653 if (TREE_CODE (base) == ARRAY_REF || TREE_CODE (base) == ARRAY_RANGE_REF)
655 if (abnormal_ssa_name_p (TREE_OPERAND (base, 2)))
656 return false;
657 if (abnormal_ssa_name_p (TREE_OPERAND (base, 3)))
658 return false;
661 return !abnormal_ssa_name_p (*index);
664 /* Returns true if EXPR contains an ssa name that occurs in an
665 abnormal phi node. */
667 bool
668 contains_abnormal_ssa_name_p (tree expr)
670 enum tree_code code;
671 enum tree_code_class codeclass;
673 if (!expr)
674 return false;
676 code = TREE_CODE (expr);
677 codeclass = TREE_CODE_CLASS (code);
679 if (code == SSA_NAME)
680 return SSA_NAME_OCCURS_IN_ABNORMAL_PHI (expr) != 0;
682 if (code == INTEGER_CST
683 || is_gimple_min_invariant (expr))
684 return false;
686 if (code == ADDR_EXPR)
687 return !for_each_index (&TREE_OPERAND (expr, 0),
688 idx_contains_abnormal_ssa_name_p,
689 NULL);
691 if (code == COND_EXPR)
692 return contains_abnormal_ssa_name_p (TREE_OPERAND (expr, 0))
693 || contains_abnormal_ssa_name_p (TREE_OPERAND (expr, 1))
694 || contains_abnormal_ssa_name_p (TREE_OPERAND (expr, 2));
696 switch (codeclass)
698 case tcc_binary:
699 case tcc_comparison:
700 if (contains_abnormal_ssa_name_p (TREE_OPERAND (expr, 1)))
701 return true;
703 /* Fallthru. */
704 case tcc_unary:
705 if (contains_abnormal_ssa_name_p (TREE_OPERAND (expr, 0)))
706 return true;
708 break;
710 default:
711 gcc_unreachable ();
714 return false;
717 /* Returns tree describing number of iterations determined from
718 EXIT of DATA->current_loop, or NULL if something goes wrong. */
720 static tree
721 niter_for_exit (struct ivopts_data *data, edge exit)
723 struct tree_niter_desc desc;
724 tree niter;
725 void **slot;
727 if (!data->niters)
729 data->niters = pointer_map_create ();
730 slot = NULL;
732 else
733 slot = pointer_map_contains (data->niters, exit);
735 if (!slot)
737 /* Try to determine number of iterations. We must know it
738 unconditionally (i.e., without possibility of # of iterations
739 being zero). Also, we cannot safely work with ssa names that
740 appear in phi nodes on abnormal edges, so that we do not create
741 overlapping life ranges for them (PR 27283). */
742 if (number_of_iterations_exit (data->current_loop,
743 exit, &desc, true)
744 && integer_zerop (desc.may_be_zero)
745 && !contains_abnormal_ssa_name_p (desc.niter))
746 niter = desc.niter;
747 else
748 niter = NULL_TREE;
750 *pointer_map_insert (data->niters, exit) = niter;
752 else
753 niter = (tree) *slot;
755 return niter;
758 /* Returns tree describing number of iterations determined from
759 single dominating exit of DATA->current_loop, or NULL if something
760 goes wrong. */
762 static tree
763 niter_for_single_dom_exit (struct ivopts_data *data)
765 edge exit = single_dom_exit (data->current_loop);
767 if (!exit)
768 return NULL;
770 return niter_for_exit (data, exit);
773 /* Initializes data structures used by the iv optimization pass, stored
774 in DATA. */
776 static void
777 tree_ssa_iv_optimize_init (struct ivopts_data *data)
779 data->version_info_size = 2 * num_ssa_names;
780 data->version_info = XCNEWVEC (struct version_info, data->version_info_size);
781 data->relevant = BITMAP_ALLOC (NULL);
782 data->important_candidates = BITMAP_ALLOC (NULL);
783 data->max_inv_id = 0;
784 data->niters = NULL;
785 data->iv_uses = VEC_alloc (iv_use_p, heap, 20);
786 data->iv_candidates = VEC_alloc (iv_cand_p, heap, 20);
787 decl_rtl_to_reset = VEC_alloc (tree, heap, 20);
790 /* Returns the memory object to which EXPR points, or NULL if we can
791 determine that it does not point to any such object. */
793 static tree
794 determine_base_object (tree expr)
796 enum tree_code code = TREE_CODE (expr);
797 tree base, obj;
799 /* If this is a pointer casted to any type, we need to determine
800 the base object for the pointer; so handle conversions before
801 throwing away non-pointer expressions. */
802 if (CONVERT_EXPR_P (expr))
803 return determine_base_object (TREE_OPERAND (expr, 0));
805 if (!POINTER_TYPE_P (TREE_TYPE (expr)))
806 return NULL_TREE;
808 switch (code)
810 case INTEGER_CST:
811 return NULL_TREE;
813 case ADDR_EXPR:
814 obj = TREE_OPERAND (expr, 0);
815 base = get_base_address (obj);
817 if (!base)
818 return expr;
820 if (TREE_CODE (base) == MEM_REF)
821 return determine_base_object (TREE_OPERAND (base, 0));
823 return fold_convert (ptr_type_node,
824 build_fold_addr_expr (base));
826 case POINTER_PLUS_EXPR:
827 return determine_base_object (TREE_OPERAND (expr, 0));
829 case PLUS_EXPR:
830 case MINUS_EXPR:
831 /* Pointer addition is done solely using POINTER_PLUS_EXPR. */
832 gcc_unreachable ();
834 default:
835 return fold_convert (ptr_type_node, expr);
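/* For example, for EXPR == &a[i] the base address of the dereferenced
   object is the declaration a, so &a (converted to ptr_type_node) is
   returned; for p + 4 * i the POINTER_PLUS_EXPR case recurses into p and
   the default case returns p itself (converted); and for an integer
   constant cast to a pointer, NULL_TREE is returned.  */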
839 /* Allocates an induction variable with given initial value BASE and step
840 STEP. */
842 static struct iv *
843 alloc_iv (tree base, tree step)
845 struct iv *iv = XCNEW (struct iv);
846 gcc_assert (step != NULL_TREE);
848 iv->base = base;
849 iv->base_object = determine_base_object (base);
850 iv->step = step;
851 iv->biv_p = false;
852 iv->have_use_for = false;
853 iv->use_id = 0;
854 iv->ssa_name = NULL_TREE;
856 return iv;
859 /* Sets BASE and STEP for induction variable IV. */
861 static void
862 set_iv (struct ivopts_data *data, tree iv, tree base, tree step)
864 struct version_info *info = name_info (data, iv);
866 gcc_assert (!info->iv);
868 bitmap_set_bit (data->relevant, SSA_NAME_VERSION (iv));
869 info->iv = alloc_iv (base, step);
870 info->iv->ssa_name = iv;
873 /* Finds induction variable declaration for VAR. */
875 static struct iv *
876 get_iv (struct ivopts_data *data, tree var)
878 basic_block bb;
879 tree type = TREE_TYPE (var);
881 if (!POINTER_TYPE_P (type)
882 && !INTEGRAL_TYPE_P (type))
883 return NULL;
885 if (!name_info (data, var)->iv)
887 bb = gimple_bb (SSA_NAME_DEF_STMT (var));
889 if (!bb
890 || !flow_bb_inside_loop_p (data->current_loop, bb))
891 set_iv (data, var, var, build_int_cst (type, 0));
894 return name_info (data, var)->iv;
897 /* Determines the step of a biv defined in PHI. Returns NULL if PHI does
898 not define a simple affine biv with nonzero step. */
900 static tree
901 determine_biv_step (gimple phi)
903 struct loop *loop = gimple_bb (phi)->loop_father;
904 tree name = PHI_RESULT (phi);
905 affine_iv iv;
907 if (!is_gimple_reg (name))
908 return NULL_TREE;
910 if (!simple_iv (loop, loop, name, &iv, true))
911 return NULL_TREE;
913 return integer_zerop (iv.step) ? NULL_TREE : iv.step;
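/* For instance, given the header phi

     i_1 = PHI <0 (preheader), i_7 (latch)>

   where the body computes i_7 = i_1 + 4, simple_iv recognizes an affine
   iv with step 4, which is returned.  If the phi merely copies a
   loop-invariant value, the step is zero and NULL_TREE is returned.  */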
916 /* Finds basic ivs. */
918 static bool
919 find_bivs (struct ivopts_data *data)
921 gimple phi;
922 tree step, type, base;
923 bool found = false;
924 struct loop *loop = data->current_loop;
925 gimple_stmt_iterator psi;
927 for (psi = gsi_start_phis (loop->header); !gsi_end_p (psi); gsi_next (&psi))
929 phi = gsi_stmt (psi);
931 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
932 continue;
934 step = determine_biv_step (phi);
935 if (!step)
936 continue;
938 base = PHI_ARG_DEF_FROM_EDGE (phi, loop_preheader_edge (loop));
939 base = expand_simple_operations (base);
940 if (contains_abnormal_ssa_name_p (base)
941 || contains_abnormal_ssa_name_p (step))
942 continue;
944 type = TREE_TYPE (PHI_RESULT (phi));
945 base = fold_convert (type, base);
946 if (step)
948 if (POINTER_TYPE_P (type))
949 step = fold_convert (sizetype, step);
950 else
951 step = fold_convert (type, step);
954 set_iv (data, PHI_RESULT (phi), base, step);
955 found = true;
958 return found;
961 /* Marks basic ivs. */
963 static void
964 mark_bivs (struct ivopts_data *data)
966 gimple phi;
967 tree var;
968 struct iv *iv, *incr_iv;
969 struct loop *loop = data->current_loop;
970 basic_block incr_bb;
971 gimple_stmt_iterator psi;
973 for (psi = gsi_start_phis (loop->header); !gsi_end_p (psi); gsi_next (&psi))
975 phi = gsi_stmt (psi);
977 iv = get_iv (data, PHI_RESULT (phi));
978 if (!iv)
979 continue;
981 var = PHI_ARG_DEF_FROM_EDGE (phi, loop_latch_edge (loop));
982 incr_iv = get_iv (data, var);
983 if (!incr_iv)
984 continue;
986 /* If the increment is in a subloop, ignore it. */
987 incr_bb = gimple_bb (SSA_NAME_DEF_STMT (var));
988 if (incr_bb->loop_father != data->current_loop
989 || (incr_bb->flags & BB_IRREDUCIBLE_LOOP))
990 continue;
992 iv->biv_p = true;
993 incr_iv->biv_p = true;
997 /* Checks whether STMT defines a linear induction variable and stores its
998 parameters to IV. */
1000 static bool
1001 find_givs_in_stmt_scev (struct ivopts_data *data, gimple stmt, affine_iv *iv)
1003 tree lhs;
1004 struct loop *loop = data->current_loop;
1006 iv->base = NULL_TREE;
1007 iv->step = NULL_TREE;
1009 if (gimple_code (stmt) != GIMPLE_ASSIGN)
1010 return false;
1012 lhs = gimple_assign_lhs (stmt);
1013 if (TREE_CODE (lhs) != SSA_NAME)
1014 return false;
1016 if (!simple_iv (loop, loop_containing_stmt (stmt), lhs, iv, true))
1017 return false;
1018 iv->base = expand_simple_operations (iv->base);
1020 if (contains_abnormal_ssa_name_p (iv->base)
1021 || contains_abnormal_ssa_name_p (iv->step))
1022 return false;
1024 return true;
1027 /* Finds general ivs in statement STMT. */
1029 static void
1030 find_givs_in_stmt (struct ivopts_data *data, gimple stmt)
1032 affine_iv iv;
1034 if (!find_givs_in_stmt_scev (data, stmt, &iv))
1035 return;
1037 set_iv (data, gimple_assign_lhs (stmt), iv.base, iv.step);
1040 /* Finds general ivs in basic block BB. */
1042 static void
1043 find_givs_in_bb (struct ivopts_data *data, basic_block bb)
1045 gimple_stmt_iterator bsi;
1047 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
1048 find_givs_in_stmt (data, gsi_stmt (bsi));
1051 /* Finds general ivs. */
1053 static void
1054 find_givs (struct ivopts_data *data)
1056 struct loop *loop = data->current_loop;
1057 basic_block *body = get_loop_body_in_dom_order (loop);
1058 unsigned i;
1060 for (i = 0; i < loop->num_nodes; i++)
1061 find_givs_in_bb (data, body[i]);
1062 free (body);
1065 /* For each ssa name defined in LOOP determines whether it is an induction
1066 variable and if so, its initial value and step. */
1068 static bool
1069 find_induction_variables (struct ivopts_data *data)
1071 unsigned i;
1072 bitmap_iterator bi;
1074 if (!find_bivs (data))
1075 return false;
1077 find_givs (data);
1078 mark_bivs (data);
1080 if (dump_file && (dump_flags & TDF_DETAILS))
1082 tree niter = niter_for_single_dom_exit (data);
1084 if (niter)
1086 fprintf (dump_file, " number of iterations ");
1087 print_generic_expr (dump_file, niter, TDF_SLIM);
1088 fprintf (dump_file, "\n\n");
1091 fprintf (dump_file, "Induction variables:\n\n");
1093 EXECUTE_IF_SET_IN_BITMAP (data->relevant, 0, i, bi)
1095 if (ver_info (data, i)->iv)
1096 dump_iv (dump_file, ver_info (data, i)->iv);
1100 return true;
1103 /* Records a use of type USE_TYPE at *USE_P in STMT whose value is IV. */
1105 static struct iv_use *
1106 record_use (struct ivopts_data *data, tree *use_p, struct iv *iv,
1107 gimple stmt, enum use_type use_type)
1109 struct iv_use *use = XCNEW (struct iv_use);
1111 use->id = n_iv_uses (data);
1112 use->type = use_type;
1113 use->iv = iv;
1114 use->stmt = stmt;
1115 use->op_p = use_p;
1116 use->related_cands = BITMAP_ALLOC (NULL);
1118 /* Clear the ssa name, so that it is not shown in the dumps, if it was
1119 not reset by the caller. */
1120 iv->ssa_name = NULL_TREE;
1122 if (dump_file && (dump_flags & TDF_DETAILS))
1123 dump_use (dump_file, use);
1125 VEC_safe_push (iv_use_p, heap, data->iv_uses, use);
1127 return use;
1130 /* Checks whether OP is a loop-level invariant and if so, records it.
1131 NONLINEAR_USE is true if the invariant is used in a way we do not
1132 handle specially. */
1134 static void
1135 record_invariant (struct ivopts_data *data, tree op, bool nonlinear_use)
1137 basic_block bb;
1138 struct version_info *info;
1140 if (TREE_CODE (op) != SSA_NAME
1141 || !is_gimple_reg (op))
1142 return;
1144 bb = gimple_bb (SSA_NAME_DEF_STMT (op));
1145 if (bb
1146 && flow_bb_inside_loop_p (data->current_loop, bb))
1147 return;
1149 info = name_info (data, op);
1150 info->name = op;
1151 info->has_nonlin_use |= nonlinear_use;
1152 if (!info->inv_id)
1153 info->inv_id = ++data->max_inv_id;
1154 bitmap_set_bit (data->relevant, SSA_NAME_VERSION (op));
1157 /* Checks whether the use OP is interesting and if so, records it. */
1159 static struct iv_use *
1160 find_interesting_uses_op (struct ivopts_data *data, tree op)
1162 struct iv *iv;
1163 struct iv *civ;
1164 gimple stmt;
1165 struct iv_use *use;
1167 if (TREE_CODE (op) != SSA_NAME)
1168 return NULL;
1170 iv = get_iv (data, op);
1171 if (!iv)
1172 return NULL;
1174 if (iv->have_use_for)
1176 use = iv_use (data, iv->use_id);
1178 gcc_assert (use->type == USE_NONLINEAR_EXPR);
1179 return use;
1182 if (integer_zerop (iv->step))
1184 record_invariant (data, op, true);
1185 return NULL;
1187 iv->have_use_for = true;
1189 civ = XNEW (struct iv);
1190 *civ = *iv;
1192 stmt = SSA_NAME_DEF_STMT (op);
1193 gcc_assert (gimple_code (stmt) == GIMPLE_PHI
1194 || is_gimple_assign (stmt));
1196 use = record_use (data, NULL, civ, stmt, USE_NONLINEAR_EXPR);
1197 iv->use_id = use->id;
1199 return use;
1202 /* Given a condition in statement STMT, checks whether it is a compare
1203 of an induction variable and an invariant. If this is the case,
1204 CONTROL_VAR is set to location of the iv, BOUND to the location of
1205 the invariant, IV_VAR and IV_BOUND are set to the corresponding
1206 induction variable descriptions, and true is returned. If this is not
1207 the case, CONTROL_VAR and BOUND are set to the arguments of the
1208 condition and false is returned. */
1210 static bool
1211 extract_cond_operands (struct ivopts_data *data, gimple stmt,
1212 tree **control_var, tree **bound,
1213 struct iv **iv_var, struct iv **iv_bound)
1215 /* The objects returned when COND has constant operands. */
1216 static struct iv const_iv;
1217 static tree zero;
1218 tree *op0 = &zero, *op1 = &zero, *tmp_op;
1219 struct iv *iv0 = &const_iv, *iv1 = &const_iv, *tmp_iv;
1220 bool ret = false;
1222 if (gimple_code (stmt) == GIMPLE_COND)
1224 op0 = gimple_cond_lhs_ptr (stmt);
1225 op1 = gimple_cond_rhs_ptr (stmt);
1227 else
1229 op0 = gimple_assign_rhs1_ptr (stmt);
1230 op1 = gimple_assign_rhs2_ptr (stmt);
1233 zero = integer_zero_node;
1234 const_iv.step = integer_zero_node;
1236 if (TREE_CODE (*op0) == SSA_NAME)
1237 iv0 = get_iv (data, *op0);
1238 if (TREE_CODE (*op1) == SSA_NAME)
1239 iv1 = get_iv (data, *op1);
1241 /* Exactly one of the compared values must be an iv, and the other one must
1242 be an invariant. */
1243 if (!iv0 || !iv1)
1244 goto end;
1246 if (integer_zerop (iv0->step))
1248 /* Control variable may be on the other side. */
1249 tmp_op = op0; op0 = op1; op1 = tmp_op;
1250 tmp_iv = iv0; iv0 = iv1; iv1 = tmp_iv;
1252 ret = !integer_zerop (iv0->step) && integer_zerop (iv1->step);
1254 end:
1255 if (control_var)
1256 *control_var = op0;
1257 if (iv_var)
1258 *iv_var = iv0;
1259 if (bound)
1260 *bound = op1;
1261 if (iv_bound)
1262 *iv_bound = iv1;
1264 return ret;
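/* For a condition i_3 < n_5, where i_3 is an iv with nonzero step and n_5
   is loop invariant, *CONTROL_VAR is set to point to i_3 and *BOUND to
   n_5, and true is returned.  For n_5 < i_3 the operands are swapped
   first; if both or neither side is an iv with nonzero step, false is
   returned.  */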
1267 /* Checks whether the condition in STMT is interesting and if so,
1268 records it. */
1270 static void
1271 find_interesting_uses_cond (struct ivopts_data *data, gimple stmt)
1273 tree *var_p, *bound_p;
1274 struct iv *var_iv, *civ;
1276 if (!extract_cond_operands (data, stmt, &var_p, &bound_p, &var_iv, NULL))
1278 find_interesting_uses_op (data, *var_p);
1279 find_interesting_uses_op (data, *bound_p);
1280 return;
1283 civ = XNEW (struct iv);
1284 *civ = *var_iv;
1285 record_use (data, NULL, civ, stmt, USE_COMPARE);
1288 /* Returns true if expression EXPR is obviously invariant in LOOP,
1289 i.e. if all its operands are defined outside of the LOOP. LOOP
1290 should not be the function body. */
1292 bool
1293 expr_invariant_in_loop_p (struct loop *loop, tree expr)
1295 basic_block def_bb;
1296 unsigned i, len;
1298 gcc_assert (loop_depth (loop) > 0);
1300 if (is_gimple_min_invariant (expr))
1301 return true;
1303 if (TREE_CODE (expr) == SSA_NAME)
1305 def_bb = gimple_bb (SSA_NAME_DEF_STMT (expr));
1306 if (def_bb
1307 && flow_bb_inside_loop_p (loop, def_bb))
1308 return false;
1310 return true;
1313 if (!EXPR_P (expr))
1314 return false;
1316 len = TREE_OPERAND_LENGTH (expr);
1317 for (i = 0; i < len; i++)
1318 if (!expr_invariant_in_loop_p (loop, TREE_OPERAND (expr, i)))
1319 return false;
1321 return true;
1324 /* Returns true if statement STMT is obviously invariant in LOOP,
1325 i.e. if all its operands on the RHS are defined outside of the LOOP.
1326 LOOP should not be the function body. */
1328 bool
1329 stmt_invariant_in_loop_p (struct loop *loop, gimple stmt)
1331 unsigned i;
1332 tree lhs;
1334 gcc_assert (loop_depth (loop) > 0);
1336 lhs = gimple_get_lhs (stmt);
1337 for (i = 0; i < gimple_num_ops (stmt); i++)
1339 tree op = gimple_op (stmt, i);
1340 if (op != lhs && !expr_invariant_in_loop_p (loop, op))
1341 return false;
1344 return true;
1347 /* Accumulates the steps of indices into DATA and replaces their values with the
1348 initial ones. Returns false when the value of the index cannot be determined.
1349 Callback for for_each_index. */
1351 struct ifs_ivopts_data
1353 struct ivopts_data *ivopts_data;
1354 gimple stmt;
1355 tree step;
1358 static bool
1359 idx_find_step (tree base, tree *idx, void *data)
1361 struct ifs_ivopts_data *dta = (struct ifs_ivopts_data *) data;
1362 struct iv *iv;
1363 tree step, iv_base, iv_step, lbound, off;
1364 struct loop *loop = dta->ivopts_data->current_loop;
1366 if (TREE_CODE (base) == MISALIGNED_INDIRECT_REF)
1367 return false;
1369 /* If base is a component ref, require that the offset of the reference
1370 be invariant. */
1371 if (TREE_CODE (base) == COMPONENT_REF)
1373 off = component_ref_field_offset (base);
1374 return expr_invariant_in_loop_p (loop, off);
1377 /* If base is array, first check whether we will be able to move the
1378 reference out of the loop (in order to take its address in strength
1379 reduction). In order for this to work we need both lower bound
1380 and step to be loop invariants. */
1381 if (TREE_CODE (base) == ARRAY_REF || TREE_CODE (base) == ARRAY_RANGE_REF)
1383 /* Moreover, for a range, the size needs to be invariant as well. */
1384 if (TREE_CODE (base) == ARRAY_RANGE_REF
1385 && !expr_invariant_in_loop_p (loop, TYPE_SIZE (TREE_TYPE (base))))
1386 return false;
1388 step = array_ref_element_size (base);
1389 lbound = array_ref_low_bound (base);
1391 if (!expr_invariant_in_loop_p (loop, step)
1392 || !expr_invariant_in_loop_p (loop, lbound))
1393 return false;
1396 if (TREE_CODE (*idx) != SSA_NAME)
1397 return true;
1399 iv = get_iv (dta->ivopts_data, *idx);
1400 if (!iv)
1401 return false;
1403 /* XXX For a base of *D42 with iv->base being &x[0], we produce
1404 *&x[0], which is not folded and does not trigger the
1405 ARRAY_REF path below. */
1406 *idx = iv->base;
1408 if (integer_zerop (iv->step))
1409 return true;
1411 if (TREE_CODE (base) == ARRAY_REF || TREE_CODE (base) == ARRAY_RANGE_REF)
1413 step = array_ref_element_size (base);
1415 /* We only handle addresses whose step is an integer constant. */
1416 if (TREE_CODE (step) != INTEGER_CST)
1417 return false;
1419 else
1420 /* The step for pointer arithmetic is already 1 byte. */
1421 step = size_one_node;
1423 iv_base = iv->base;
1424 iv_step = iv->step;
1425 if (!convert_affine_scev (dta->ivopts_data->current_loop,
1426 sizetype, &iv_base, &iv_step, dta->stmt,
1427 false))
1429 /* The index might wrap. */
1430 return false;
1433 step = fold_build2 (MULT_EXPR, sizetype, step, iv_step);
1434 dta->step = fold_build2 (PLUS_EXPR, sizetype, dta->step, step);
1436 return true;
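/* As a rough example, for a reference a[2 * i] in a loop where i steps
   by 1, the index iv has step 2; with 4-byte array elements the routine
   adds 2 * 4 == 8 bytes to DTA->step and rewrites *IDX to the initial
   value of the index, leaving the loop-invariant part of the address
   behind.  */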
1439 /* Records use in index IDX. Callback for for_each_index. Ivopts data
1440 object is passed to it in DATA. */
1442 static bool
1443 idx_record_use (tree base, tree *idx,
1444 void *vdata)
1446 struct ivopts_data *data = (struct ivopts_data *) vdata;
1447 find_interesting_uses_op (data, *idx);
1448 if (TREE_CODE (base) == ARRAY_REF || TREE_CODE (base) == ARRAY_RANGE_REF)
1450 find_interesting_uses_op (data, array_ref_element_size (base));
1451 find_interesting_uses_op (data, array_ref_low_bound (base));
1453 return true;
1456 /* If we can prove that TOP = cst * BOT for some constant cst,
1457 store cst to MUL and return true. Otherwise return false.
1458 The returned value is always sign-extended, regardless of the
1459 signedness of TOP and BOT. */
1461 static bool
1462 constant_multiple_of (tree top, tree bot, double_int *mul)
1464 tree mby;
1465 enum tree_code code;
1466 double_int res, p0, p1;
1467 unsigned precision = TYPE_PRECISION (TREE_TYPE (top));
1469 STRIP_NOPS (top);
1470 STRIP_NOPS (bot);
1472 if (operand_equal_p (top, bot, 0))
1474 *mul = double_int_one;
1475 return true;
1478 code = TREE_CODE (top);
1479 switch (code)
1481 case MULT_EXPR:
1482 mby = TREE_OPERAND (top, 1);
1483 if (TREE_CODE (mby) != INTEGER_CST)
1484 return false;
1486 if (!constant_multiple_of (TREE_OPERAND (top, 0), bot, &res))
1487 return false;
1489 *mul = double_int_sext (double_int_mul (res, tree_to_double_int (mby)),
1490 precision);
1491 return true;
1493 case PLUS_EXPR:
1494 case MINUS_EXPR:
1495 if (!constant_multiple_of (TREE_OPERAND (top, 0), bot, &p0)
1496 || !constant_multiple_of (TREE_OPERAND (top, 1), bot, &p1))
1497 return false;
1499 if (code == MINUS_EXPR)
1500 p1 = double_int_neg (p1);
1501 *mul = double_int_sext (double_int_add (p0, p1), precision);
1502 return true;
1504 case INTEGER_CST:
1505 if (TREE_CODE (bot) != INTEGER_CST)
1506 return false;
1508 p0 = double_int_sext (tree_to_double_int (top), precision);
1509 p1 = double_int_sext (tree_to_double_int (bot), precision);
1510 if (double_int_zero_p (p1))
1511 return false;
1512 *mul = double_int_sext (double_int_sdivmod (p0, p1, FLOOR_DIV_EXPR, &res),
1513 precision);
1514 return double_int_zero_p (res);
1516 default:
1517 return false;
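/* Examples: for TOP == j * 6 and BOT == j the MULT_EXPR case recurses
   down to operand_equal_p and yields *MUL == 6; for TOP == 14 and
   BOT == 7 the INTEGER_CST case yields *MUL == 2; for TOP == 15 and
   BOT == 7 the floor division leaves a nonzero remainder, so false is
   returned.  */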
1521 /* Returns true if memory reference REF with step STEP may be unaligned. */
1523 static bool
1524 may_be_unaligned_p (tree ref, tree step)
1526 tree base;
1527 tree base_type;
1528 HOST_WIDE_INT bitsize;
1529 HOST_WIDE_INT bitpos;
1530 tree toffset;
1531 enum machine_mode mode;
1532 int unsignedp, volatilep;
1533 unsigned base_align;
1535 /* TARGET_MEM_REFs are translated directly to valid MEMs on the target,
1536 thus they are not misaligned. */
1537 if (TREE_CODE (ref) == TARGET_MEM_REF)
1538 return false;
1540 /* The test below is basically copy of what expr.c:normal_inner_ref
1541 does to check whether the object must be loaded by parts when
1542 STRICT_ALIGNMENT is true. */
1543 base = get_inner_reference (ref, &bitsize, &bitpos, &toffset, &mode,
1544 &unsignedp, &volatilep, true);
1545 base_type = TREE_TYPE (base);
1546 base_align = TYPE_ALIGN (base_type);
1548 if (mode != BLKmode)
1550 unsigned mode_align = GET_MODE_ALIGNMENT (mode);
1552 if (base_align < mode_align
1553 || (bitpos % mode_align) != 0
1554 || (bitpos % BITS_PER_UNIT) != 0)
1555 return true;
1557 if (toffset
1558 && (highest_pow2_factor (toffset) * BITS_PER_UNIT) < mode_align)
1559 return true;
1561 if ((highest_pow2_factor (step) * BITS_PER_UNIT) < mode_align)
1562 return true;
1565 return false;
1568 /* Return true if EXPR may be non-addressable. */
1570 static bool
1571 may_be_nonaddressable_p (tree expr)
1573 switch (TREE_CODE (expr))
1575 case TARGET_MEM_REF:
1576 /* TARGET_MEM_REFs are translated directly to valid MEMs on the
1577 target, thus they are always addressable. */
1578 return false;
1580 case COMPONENT_REF:
1581 return DECL_NONADDRESSABLE_P (TREE_OPERAND (expr, 1))
1582 || may_be_nonaddressable_p (TREE_OPERAND (expr, 0));
1584 case VIEW_CONVERT_EXPR:
1585 /* This kind of view-conversions may wrap non-addressable objects
1586 and make them look addressable. After some processing the
1587 non-addressability may be uncovered again, causing ADDR_EXPRs
1588 of inappropriate objects to be built. */
1589 if (is_gimple_reg (TREE_OPERAND (expr, 0))
1590 || !is_gimple_addressable (TREE_OPERAND (expr, 0)))
1591 return true;
1593 /* ... fall through ... */
1595 case ARRAY_REF:
1596 case ARRAY_RANGE_REF:
1597 return may_be_nonaddressable_p (TREE_OPERAND (expr, 0));
1599 CASE_CONVERT:
1600 return true;
1602 default:
1603 break;
1606 return false;
1609 /* Finds addresses in *OP_P inside STMT. */
1611 static void
1612 find_interesting_uses_address (struct ivopts_data *data, gimple stmt, tree *op_p)
1614 tree base = *op_p, step = size_zero_node;
1615 struct iv *civ;
1616 struct ifs_ivopts_data ifs_ivopts_data;
1618 /* Do not play with volatile memory references. A bit too conservative,
1619 perhaps, but safe. */
1620 if (gimple_has_volatile_ops (stmt))
1621 goto fail;
1623 /* Ignore bitfields for now. Not really something terribly complicated
1624 to handle. TODO. */
1625 if (TREE_CODE (base) == BIT_FIELD_REF)
1626 goto fail;
1628 base = unshare_expr (base);
1630 if (TREE_CODE (base) == TARGET_MEM_REF)
1632 tree type = build_pointer_type (TREE_TYPE (base));
1633 tree astep;
1635 if (TMR_BASE (base)
1636 && TREE_CODE (TMR_BASE (base)) == SSA_NAME)
1638 civ = get_iv (data, TMR_BASE (base));
1639 if (!civ)
1640 goto fail;
1642 TMR_BASE (base) = civ->base;
1643 step = civ->step;
1645 if (TMR_INDEX (base)
1646 && TREE_CODE (TMR_INDEX (base)) == SSA_NAME)
1648 civ = get_iv (data, TMR_INDEX (base));
1649 if (!civ)
1650 goto fail;
1652 TMR_INDEX (base) = civ->base;
1653 astep = civ->step;
1655 if (astep)
1657 if (TMR_STEP (base))
1658 astep = fold_build2 (MULT_EXPR, type, TMR_STEP (base), astep);
1660 step = fold_build2 (PLUS_EXPR, type, step, astep);
1664 if (integer_zerop (step))
1665 goto fail;
1666 base = tree_mem_ref_addr (type, base);
1668 else
1670 ifs_ivopts_data.ivopts_data = data;
1671 ifs_ivopts_data.stmt = stmt;
1672 ifs_ivopts_data.step = size_zero_node;
1673 if (!for_each_index (&base, idx_find_step, &ifs_ivopts_data)
1674 || integer_zerop (ifs_ivopts_data.step))
1675 goto fail;
1676 step = ifs_ivopts_data.step;
1678 gcc_assert (TREE_CODE (base) != MISALIGNED_INDIRECT_REF);
1680 /* Check that the base expression is addressable. This needs
1681 to be done after substituting bases of IVs into it. */
1682 if (may_be_nonaddressable_p (base))
1683 goto fail;
1685 /* Moreover, on strict alignment platforms, check that it is
1686 sufficiently aligned. */
1687 if (STRICT_ALIGNMENT && may_be_unaligned_p (base, step))
1688 goto fail;
1690 base = build_fold_addr_expr (base);
1692 /* Substituting bases of IVs into the base expression might
1693 have caused folding opportunities. */
1694 if (TREE_CODE (base) == ADDR_EXPR)
1696 tree *ref = &TREE_OPERAND (base, 0);
1697 while (handled_component_p (*ref))
1698 ref = &TREE_OPERAND (*ref, 0);
1699 if (TREE_CODE (*ref) == MEM_REF)
1701 tree tem = fold_binary (MEM_REF, TREE_TYPE (*ref),
1702 TREE_OPERAND (*ref, 0),
1703 TREE_OPERAND (*ref, 1));
1704 if (tem)
1705 *ref = tem;
1710 civ = alloc_iv (base, step);
1711 record_use (data, op_p, civ, stmt, USE_ADDRESS);
1712 return;
1714 fail:
1715 for_each_index (op_p, idx_record_use, data);
1718 /* Finds and records invariants used in STMT. */
1720 static void
1721 find_invariants_stmt (struct ivopts_data *data, gimple stmt)
1723 ssa_op_iter iter;
1724 use_operand_p use_p;
1725 tree op;
1727 FOR_EACH_PHI_OR_STMT_USE (use_p, stmt, iter, SSA_OP_USE)
1729 op = USE_FROM_PTR (use_p);
1730 record_invariant (data, op, false);
1734 /* Finds interesting uses of induction variables in the statement STMT. */
1736 static void
1737 find_interesting_uses_stmt (struct ivopts_data *data, gimple stmt)
1739 struct iv *iv;
1740 tree op, *lhs, *rhs;
1741 ssa_op_iter iter;
1742 use_operand_p use_p;
1743 enum tree_code code;
1745 find_invariants_stmt (data, stmt);
1747 if (gimple_code (stmt) == GIMPLE_COND)
1749 find_interesting_uses_cond (data, stmt);
1750 return;
1753 if (is_gimple_assign (stmt))
1755 lhs = gimple_assign_lhs_ptr (stmt);
1756 rhs = gimple_assign_rhs1_ptr (stmt);
1758 if (TREE_CODE (*lhs) == SSA_NAME)
1760 /* If the statement defines an induction variable, the uses are not
1761 interesting by themselves. */
1763 iv = get_iv (data, *lhs);
1765 if (iv && !integer_zerop (iv->step))
1766 return;
1769 code = gimple_assign_rhs_code (stmt);
1770 if (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS
1771 && (REFERENCE_CLASS_P (*rhs)
1772 || is_gimple_val (*rhs)))
1774 if (REFERENCE_CLASS_P (*rhs))
1775 find_interesting_uses_address (data, stmt, rhs);
1776 else
1777 find_interesting_uses_op (data, *rhs);
1779 if (REFERENCE_CLASS_P (*lhs))
1780 find_interesting_uses_address (data, stmt, lhs);
1781 return;
1783 else if (TREE_CODE_CLASS (code) == tcc_comparison)
1785 find_interesting_uses_cond (data, stmt);
1786 return;
1789 /* TODO -- we should also handle address uses of type
1791 memory = call (whatever);
1795 call (memory). */
1798 if (gimple_code (stmt) == GIMPLE_PHI
1799 && gimple_bb (stmt) == data->current_loop->header)
1801 iv = get_iv (data, PHI_RESULT (stmt));
1803 if (iv && !integer_zerop (iv->step))
1804 return;
1807 FOR_EACH_PHI_OR_STMT_USE (use_p, stmt, iter, SSA_OP_USE)
1809 op = USE_FROM_PTR (use_p);
1811 if (TREE_CODE (op) != SSA_NAME)
1812 continue;
1814 iv = get_iv (data, op);
1815 if (!iv)
1816 continue;
1818 find_interesting_uses_op (data, op);
1822 /* Finds interesting uses of induction variables outside of loops
1823 on loop exit edge EXIT. */
1825 static void
1826 find_interesting_uses_outside (struct ivopts_data *data, edge exit)
1828 gimple phi;
1829 gimple_stmt_iterator psi;
1830 tree def;
1832 for (psi = gsi_start_phis (exit->dest); !gsi_end_p (psi); gsi_next (&psi))
1834 phi = gsi_stmt (psi);
1835 def = PHI_ARG_DEF_FROM_EDGE (phi, exit);
1836 if (is_gimple_reg (def))
1837 find_interesting_uses_op (data, def);
1841 /* Finds uses of the induction variables that are interesting. */
1843 static void
1844 find_interesting_uses (struct ivopts_data *data)
1846 basic_block bb;
1847 gimple_stmt_iterator bsi;
1848 basic_block *body = get_loop_body (data->current_loop);
1849 unsigned i;
1850 struct version_info *info;
1851 edge e;
1853 if (dump_file && (dump_flags & TDF_DETAILS))
1854 fprintf (dump_file, "Uses:\n\n");
1856 for (i = 0; i < data->current_loop->num_nodes; i++)
1858 edge_iterator ei;
1859 bb = body[i];
1861 FOR_EACH_EDGE (e, ei, bb->succs)
1862 if (e->dest != EXIT_BLOCK_PTR
1863 && !flow_bb_inside_loop_p (data->current_loop, e->dest))
1864 find_interesting_uses_outside (data, e);
1866 for (bsi = gsi_start_phis (bb); !gsi_end_p (bsi); gsi_next (&bsi))
1867 find_interesting_uses_stmt (data, gsi_stmt (bsi));
1868 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
1869 if (!is_gimple_debug (gsi_stmt (bsi)))
1870 find_interesting_uses_stmt (data, gsi_stmt (bsi));
1873 if (dump_file && (dump_flags & TDF_DETAILS))
1875 bitmap_iterator bi;
1877 fprintf (dump_file, "\n");
1879 EXECUTE_IF_SET_IN_BITMAP (data->relevant, 0, i, bi)
1881 info = ver_info (data, i);
1882 if (info->inv_id)
1884 fprintf (dump_file, " ");
1885 print_generic_expr (dump_file, info->name, TDF_SLIM);
1886 fprintf (dump_file, " is invariant (%d)%s\n",
1887 info->inv_id, info->has_nonlin_use ? "" : ", eliminable");
1891 fprintf (dump_file, "\n");
1894 free (body);
1897 /* Strips constant offsets from EXPR and stores them to OFFSET. If INSIDE_ADDR
1898 is true, assume we are inside an address. If TOP_COMPREF is true, assume
1899 we are at the top-level of the processed address. */
1901 static tree
1902 strip_offset_1 (tree expr, bool inside_addr, bool top_compref,
1903 unsigned HOST_WIDE_INT *offset)
1905 tree op0 = NULL_TREE, op1 = NULL_TREE, tmp, step;
1906 enum tree_code code;
1907 tree type, orig_type = TREE_TYPE (expr);
1908 unsigned HOST_WIDE_INT off0, off1, st;
1909 tree orig_expr = expr;
1911 STRIP_NOPS (expr);
1913 type = TREE_TYPE (expr);
1914 code = TREE_CODE (expr);
1915 *offset = 0;
1917 switch (code)
1919 case INTEGER_CST:
1920 if (!cst_and_fits_in_hwi (expr)
1921 || integer_zerop (expr))
1922 return orig_expr;
1924 *offset = int_cst_value (expr);
1925 return build_int_cst (orig_type, 0);
1927 case POINTER_PLUS_EXPR:
1928 case PLUS_EXPR:
1929 case MINUS_EXPR:
1930 op0 = TREE_OPERAND (expr, 0);
1931 op1 = TREE_OPERAND (expr, 1);
1933 op0 = strip_offset_1 (op0, false, false, &off0);
1934 op1 = strip_offset_1 (op1, false, false, &off1);
1936 *offset = (code == MINUS_EXPR ? off0 - off1 : off0 + off1);
1937 if (op0 == TREE_OPERAND (expr, 0)
1938 && op1 == TREE_OPERAND (expr, 1))
1939 return orig_expr;
1941 if (integer_zerop (op1))
1942 expr = op0;
1943 else if (integer_zerop (op0))
1945 if (code == MINUS_EXPR)
1946 expr = fold_build1 (NEGATE_EXPR, type, op1);
1947 else
1948 expr = op1;
1950 else
1951 expr = fold_build2 (code, type, op0, op1);
1953 return fold_convert (orig_type, expr);
1955 case MULT_EXPR:
1956 op1 = TREE_OPERAND (expr, 1);
1957 if (!cst_and_fits_in_hwi (op1))
1958 return orig_expr;
1960 op0 = TREE_OPERAND (expr, 0);
1961 op0 = strip_offset_1 (op0, false, false, &off0);
1962 if (op0 == TREE_OPERAND (expr, 0))
1963 return orig_expr;
1965 *offset = off0 * int_cst_value (op1);
1966 if (integer_zerop (op0))
1967 expr = op0;
1968 else
1969 expr = fold_build2 (MULT_EXPR, type, op0, op1);
1971 return fold_convert (orig_type, expr);
1973 case ARRAY_REF:
1974 case ARRAY_RANGE_REF:
1975 if (!inside_addr)
1976 return orig_expr;
1978 step = array_ref_element_size (expr);
1979 if (!cst_and_fits_in_hwi (step))
1980 break;
1982 st = int_cst_value (step);
1983 op1 = TREE_OPERAND (expr, 1);
1984 op1 = strip_offset_1 (op1, false, false, &off1);
1985 *offset = off1 * st;
1987 if (top_compref
1988 && integer_zerop (op1))
1990 /* Strip the component reference completely. */
1991 op0 = TREE_OPERAND (expr, 0);
1992 op0 = strip_offset_1 (op0, inside_addr, top_compref, &off0);
1993 *offset += off0;
1994 return op0;
1996 break;
1998 case COMPONENT_REF:
1999 if (!inside_addr)
2000 return orig_expr;
2002 tmp = component_ref_field_offset (expr);
2003 if (top_compref
2004 && cst_and_fits_in_hwi (tmp))
2006 /* Strip the component reference completely. */
2007 op0 = TREE_OPERAND (expr, 0);
2008 op0 = strip_offset_1 (op0, inside_addr, top_compref, &off0);
2009 *offset = off0 + int_cst_value (tmp);
2010 return op0;
2012 break;
2014 case ADDR_EXPR:
2015 op0 = TREE_OPERAND (expr, 0);
2016 op0 = strip_offset_1 (op0, true, true, &off0);
2017 *offset += off0;
2019 if (op0 == TREE_OPERAND (expr, 0))
2020 return orig_expr;
2022 expr = build_fold_addr_expr (op0);
2023 return fold_convert (orig_type, expr);
2025 case MEM_REF:
2026 /* ??? Offset operand? */
2027 inside_addr = false;
2028 break;
2030 default:
2031 return orig_expr;
2034 /* Default handling of expressions for which we want to recurse into
2035 the first operand. */
2036 op0 = TREE_OPERAND (expr, 0);
2037 op0 = strip_offset_1 (op0, inside_addr, false, &off0);
2038 *offset += off0;
2040 if (op0 == TREE_OPERAND (expr, 0)
2041 && (!op1 || op1 == TREE_OPERAND (expr, 1)))
2042 return orig_expr;
2044 expr = copy_node (expr);
2045 TREE_OPERAND (expr, 0) = op0;
2046 if (op1)
2047 TREE_OPERAND (expr, 1) = op1;
2049 /* Inside address, we might strip the top level component references,
2050 thus changing type of the expression. Handling of ADDR_EXPR
2051 will fix that. */
2052 expr = fold_convert (orig_type, expr);
2054 return expr;
2057 /* Strips constant offsets from EXPR and stores them to OFFSET. */
2059 static tree
2060 strip_offset (tree expr, unsigned HOST_WIDE_INT *offset)
2062 return strip_offset_1 (expr, false, false, offset);
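/* For instance, for EXPR == &a[i + 3] with 4-byte array elements this
   returns &a[i] and stores 12 to *OFFSET; the constant displacement can
   then be matched by the addressing mode rather than being added into
   the induction variable.  */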
2065 /* Returns variant of TYPE that can be used as base for different uses.
2066 We return unsigned type with the same precision, which avoids problems
2067 with overflows. */
2069 static tree
2070 generic_type_for (tree type)
2072 if (POINTER_TYPE_P (type))
2073 return unsigned_type_for (type);
2075 if (TYPE_UNSIGNED (type))
2076 return type;
2078 return unsigned_type_for (type);
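/* E.g. both int and int * (on a 32-bit target) map to a 32-bit unsigned
   type, so arithmetic on candidates wraps modulo the precision instead
   of invoking signed-overflow or pointer-overflow rules.  */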
2081 /* Records invariants in *EXPR_P. Callback for walk_tree. DATA contains
2082 the bitmap in which we should store them. */
2084 static struct ivopts_data *fd_ivopts_data;
2085 static tree
2086 find_depends (tree *expr_p, int *ws ATTRIBUTE_UNUSED, void *data)
2088 bitmap *depends_on = (bitmap *) data;
2089 struct version_info *info;
2091 if (TREE_CODE (*expr_p) != SSA_NAME)
2092 return NULL_TREE;
2093 info = name_info (fd_ivopts_data, *expr_p);
2095 if (!info->inv_id || info->has_nonlin_use)
2096 return NULL_TREE;
2098 if (!*depends_on)
2099 *depends_on = BITMAP_ALLOC (NULL);
2100 bitmap_set_bit (*depends_on, info->inv_id);
2102 return NULL_TREE;
2105 /* Adds a candidate BASE + STEP * i. Important field is set to IMPORTANT and
2106 position to POS. If USE is not NULL, the candidate is set as related to
2107 it. If both BASE and STEP are NULL, we add a pseudocandidate for the
2108 replacement of the final value of the iv by a direct computation. */
2110 static struct iv_cand *
2111 add_candidate_1 (struct ivopts_data *data,
2112 tree base, tree step, bool important, enum iv_position pos,
2113 struct iv_use *use, gimple incremented_at)
2115 unsigned i;
2116 struct iv_cand *cand = NULL;
2117 tree type, orig_type;
2119 if (base)
2121 orig_type = TREE_TYPE (base);
2122 type = generic_type_for (orig_type);
2123 if (type != orig_type)
2125 base = fold_convert (type, base);
2126 step = fold_convert (type, step);
2130 for (i = 0; i < n_iv_cands (data); i++)
2132 cand = iv_cand (data, i);
2134 if (cand->pos != pos)
2135 continue;
2137 if (cand->incremented_at != incremented_at
2138 || ((pos == IP_AFTER_USE || pos == IP_BEFORE_USE)
2139 && cand->ainc_use != use))
2140 continue;
2142 if (!cand->iv)
2144 if (!base && !step)
2145 break;
2147 continue;
2150 if (!base && !step)
2151 continue;
2153 if (operand_equal_p (base, cand->iv->base, 0)
2154 && operand_equal_p (step, cand->iv->step, 0))
2155 break;
2158 if (i == n_iv_cands (data))
2160 cand = XCNEW (struct iv_cand);
2161 cand->id = i;
2163 if (!base && !step)
2164 cand->iv = NULL;
2165 else
2166 cand->iv = alloc_iv (base, step);
2168 cand->pos = pos;
2169 if (pos != IP_ORIGINAL && cand->iv)
2171 cand->var_before = create_tmp_var_raw (TREE_TYPE (base), "ivtmp");
2172 cand->var_after = cand->var_before;
2174 cand->important = important;
2175 cand->incremented_at = incremented_at;
2176 VEC_safe_push (iv_cand_p, heap, data->iv_candidates, cand);
2178 if (step
2179 && TREE_CODE (step) != INTEGER_CST)
2181 fd_ivopts_data = data;
2182 walk_tree (&step, find_depends, &cand->depends_on, NULL);
2185 if (pos == IP_AFTER_USE || pos == IP_BEFORE_USE)
2186 cand->ainc_use = use;
2187 else
2188 cand->ainc_use = NULL;
2190 if (dump_file && (dump_flags & TDF_DETAILS))
2191 dump_cand (dump_file, cand);
2194 if (important && !cand->important)
2196 cand->important = true;
2197 if (dump_file && (dump_flags & TDF_DETAILS))
2198 fprintf (dump_file, "Candidate %d is important\n", cand->id);
2201 if (use)
2203 bitmap_set_bit (use->related_cands, i);
2204 if (dump_file && (dump_flags & TDF_DETAILS))
2205 fprintf (dump_file, "Candidate %d is related to use %d\n",
2206 cand->id, use->id);
2209 return cand;
2212 /* Returns true if incrementing the induction variable at the end of the LOOP
2213 is allowed.
2215 The purpose is to avoid splitting the latch edge with a biv increment,
2216 thus creating a jump, possibly confusing other optimization passes and
2217 leaving less freedom to the scheduler. So we allow IP_END_POS only if
2218 IP_NORMAL_POS is not available (i.e. we have no better alternative), or
2219 if the latch edge is already nonempty. */
2221 static bool
2222 allow_ip_end_pos_p (struct loop *loop)
2224 if (!ip_normal_pos (loop))
2225 return true;
2227 if (!empty_block_p (ip_end_pos (loop)))
2228 return true;
2230 return false;
2233 /* If possible, adds autoincrement candidates BASE + STEP * i based on use USE.
2234 Important field is set to IMPORTANT. */
2236 static void
2237 add_autoinc_candidates (struct ivopts_data *data, tree base, tree step,
2238 bool important, struct iv_use *use)
2240 basic_block use_bb = gimple_bb (use->stmt);
2241 enum machine_mode mem_mode;
2242 unsigned HOST_WIDE_INT cstepi;
2244 /* If we insert the increment in any position other than the standard
2245 ones, we must ensure that it is incremented once per iteration.
2246 It must not be in an inner nested loop, or on one side of an if
2247 statement. */
2248 if (use_bb->loop_father != data->current_loop
2249 || !dominated_by_p (CDI_DOMINATORS, data->current_loop->latch, use_bb)
2250 || stmt_could_throw_p (use->stmt)
2251 || !cst_and_fits_in_hwi (step))
2252 return;
2254 cstepi = int_cst_value (step);
2256 mem_mode = TYPE_MODE (TREE_TYPE (*use->op_p));
2257 if ((HAVE_PRE_INCREMENT && GET_MODE_SIZE (mem_mode) == cstepi)
2258 || (HAVE_PRE_DECREMENT && GET_MODE_SIZE (mem_mode) == -cstepi))
2260 enum tree_code code = MINUS_EXPR;
2261 tree new_base;
2262 tree new_step = step;
2264 if (POINTER_TYPE_P (TREE_TYPE (base)))
2266 new_step = fold_build1 (NEGATE_EXPR, TREE_TYPE (step), step);
2267 code = POINTER_PLUS_EXPR;
2269 else
2270 new_step = fold_convert (TREE_TYPE (base), new_step);
2271 new_base = fold_build2 (code, TREE_TYPE (base), base, new_step);
2272 add_candidate_1 (data, new_base, step, important, IP_BEFORE_USE, use,
2273 use->stmt);
2275 if ((HAVE_POST_INCREMENT && GET_MODE_SIZE (mem_mode) == cstepi)
2276 || (HAVE_POST_DECREMENT && GET_MODE_SIZE (mem_mode) == -cstepi))
2278 add_candidate_1 (data, base, step, important, IP_AFTER_USE, use,
2279 use->stmt);
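/* A sketch with assumed numbers: for a 4-byte mem_mode and step == 4 on
   a target with pre-increment addressing, the code above adds an
   IP_BEFORE_USE candidate whose base is BASE - 4, so that incrementing
   first and then dereferencing yields the original address; with
   post-increment addressing it adds an IP_AFTER_USE candidate with the
   unchanged base.  */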
2283 /* Adds a candidate BASE + STEP * i. Important field is set to IMPORTANT and
2284 position to POS. If USE is not NULL, the candidate is set as related to
2285 it. The candidate computation is scheduled at all available positions. */
2287 static void
2288 add_candidate (struct ivopts_data *data,
2289 tree base, tree step, bool important, struct iv_use *use)
2291 if (ip_normal_pos (data->current_loop))
2292 add_candidate_1 (data, base, step, important, IP_NORMAL, use, NULL);
2293 if (ip_end_pos (data->current_loop)
2294 && allow_ip_end_pos_p (data->current_loop))
2295 add_candidate_1 (data, base, step, important, IP_END, use, NULL);
2297 if (use != NULL && use->type == USE_ADDRESS)
2298 add_autoinc_candidates (data, base, step, important, use);
2301 /* Add a standard "0 + 1 * iteration" iv candidate for a
2302 type with SIZE bits. */
2304 static void
2305 add_standard_iv_candidates_for_size (struct ivopts_data *data,
2306 unsigned int size)
2308 tree type = lang_hooks.types.type_for_size (size, true);
2309 add_candidate (data, build_int_cst (type, 0), build_int_cst (type, 1),
2310 true, NULL);
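/* E.g. for SIZE == 32 this adds the canonical counter running 0, 1, 2,
   ... in the 32-bit unsigned type -- the plain "i++" that almost every
   loop can be rewritten onto.  */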
2313 /* Adds standard iv candidates. */
2315 static void
2316 add_standard_iv_candidates (struct ivopts_data *data)
2318 add_standard_iv_candidates_for_size (data, INT_TYPE_SIZE);
2320 /* The same for a double-integer type if it is still fast enough. */
2321 if (BITS_PER_WORD >= INT_TYPE_SIZE * 2)
2322 add_standard_iv_candidates_for_size (data, INT_TYPE_SIZE * 2);
2326 /* Adds candidates based on the old induction variable IV. */
2328 static void
2329 add_old_iv_candidates (struct ivopts_data *data, struct iv *iv)
2331 gimple phi;
2332 tree def;
2333 struct iv_cand *cand;
2335 add_candidate (data, iv->base, iv->step, true, NULL);
2337 /* The same, but with initial value zero. */
2338 if (POINTER_TYPE_P (TREE_TYPE (iv->base)))
2339 add_candidate (data, size_int (0), iv->step, true, NULL);
2340 else
2341 add_candidate (data, build_int_cst (TREE_TYPE (iv->base), 0),
2342 iv->step, true, NULL);
2344 phi = SSA_NAME_DEF_STMT (iv->ssa_name);
2345 if (gimple_code (phi) == GIMPLE_PHI)
2347 /* Additionally record the possibility of leaving the original iv
2348 untouched. */
2349 def = PHI_ARG_DEF_FROM_EDGE (phi, loop_latch_edge (data->current_loop));
2350 cand = add_candidate_1 (data,
2351 iv->base, iv->step, true, IP_ORIGINAL, NULL,
2352 SSA_NAME_DEF_STMT (def));
2353 cand->var_before = iv->ssa_name;
2354 cand->var_after = def;
2358 /* Adds candidates based on the old induction variables. */
2360 static void
2361 add_old_ivs_candidates (struct ivopts_data *data)
2363 unsigned i;
2364 struct iv *iv;
2365 bitmap_iterator bi;
2367 EXECUTE_IF_SET_IN_BITMAP (data->relevant, 0, i, bi)
2369 iv = ver_info (data, i)->iv;
2370 if (iv && iv->biv_p && !integer_zerop (iv->step))
2371 add_old_iv_candidates (data, iv);
2375 /* Adds candidates based on the value of the induction variable IV and USE. */
2377 static void
2378 add_iv_value_candidates (struct ivopts_data *data,
2379 struct iv *iv, struct iv_use *use)
2381 unsigned HOST_WIDE_INT offset;
2382 tree base;
2383 tree basetype;
2385 add_candidate (data, iv->base, iv->step, false, use);
2387 /* The same, but with initial value zero. Make such a variable
2388 important, since it is generic enough that many uses may be based
2389 on it. */
2390 basetype = TREE_TYPE (iv->base);
2391 if (POINTER_TYPE_P (basetype))
2392 basetype = sizetype;
2393 add_candidate (data, build_int_cst (basetype, 0),
2394 iv->step, true, use);
2396 /* Third, try removing the constant offset. Make sure we even add a
2397 candidate for &a[0] vs. (T *)&a. */
2398 base = strip_offset (iv->base, &offset);
2399 if (offset
2400 || base != iv->base)
2401 add_candidate (data, base, iv->step, false, use);
2404 /* Adds candidates based on the uses. */
2406 static void
2407 add_derived_ivs_candidates (struct ivopts_data *data)
2409 unsigned i;
2411 for (i = 0; i < n_iv_uses (data); i++)
2413 struct iv_use *use = iv_use (data, i);
2415 if (!use)
2416 continue;
2418 switch (use->type)
2420 case USE_NONLINEAR_EXPR:
2421 case USE_COMPARE:
2422 case USE_ADDRESS:
2423 /* Just add the ivs based on the value of the iv used here. */
2424 add_iv_value_candidates (data, use->iv, use);
2425 break;
2427 default:
2428 gcc_unreachable ();
2433 /* Record important candidates and add them to related_cands bitmaps
2434 if needed. */
2436 static void
2437 record_important_candidates (struct ivopts_data *data)
2439 unsigned i;
2440 struct iv_use *use;
2442 for (i = 0; i < n_iv_cands (data); i++)
2444 struct iv_cand *cand = iv_cand (data, i);
2446 if (cand->important)
2447 bitmap_set_bit (data->important_candidates, i);
2450 data->consider_all_candidates = (n_iv_cands (data)
2451 <= CONSIDER_ALL_CANDIDATES_BOUND);
2453 if (data->consider_all_candidates)
2455 /* We will not need "related_cands" bitmaps in this case,
2456 so release them to decrease peak memory consumption. */
2457 for (i = 0; i < n_iv_uses (data); i++)
2459 use = iv_use (data, i);
2460 BITMAP_FREE (use->related_cands);
2463 else
2465 /* Add important candidates to the related_cands bitmaps. */
2466 for (i = 0; i < n_iv_uses (data); i++)
2467 bitmap_ior_into (iv_use (data, i)->related_cands,
2468 data->important_candidates);
2472 /* Allocates the data structure mapping the (use, candidate) pairs to costs.
2473 If consider_all_candidates is true, we use a two-dimensional array; otherwise
2474 we allocate a small hash-style map for every use. */
2476 static void
2477 alloc_use_cost_map (struct ivopts_data *data)
2479 unsigned i, size, s, j;
2481 for (i = 0; i < n_iv_uses (data); i++)
2483 struct iv_use *use = iv_use (data, i);
2484 bitmap_iterator bi;
2486 if (data->consider_all_candidates)
2487 size = n_iv_cands (data);
2488 else
2490 s = 0;
2491 EXECUTE_IF_SET_IN_BITMAP (use->related_cands, 0, j, bi)
2493 s++;
2496 /* Round up to a power of two, so that computing the modulo by it is fast. */
2497 for (size = 1; size < s; size <<= 1)
2498 continue;
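/* A sketch of the effect (numbers illustrative): with five related
   candidates we get size == 8, so set_use_iv_cost can reduce an id with
   "id & (8 - 1)" -- equivalent to "id % 8" -- instead of a division.  */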
2501 use->n_map_members = size;
2502 use->cost_map = XCNEWVEC (struct cost_pair, size);
2506 /* Returns a description of the computation cost of an expression whose
2507 runtime cost is RUNTIME and whose complexity is COMPLEXITY. */
2509 static comp_cost
2510 new_cost (unsigned runtime, unsigned complexity)
2512 comp_cost cost;
2514 cost.cost = runtime;
2515 cost.complexity = complexity;
2517 return cost;
2520 /* Adds costs COST1 and COST2. */
2522 static comp_cost
2523 add_costs (comp_cost cost1, comp_cost cost2)
2525 cost1.cost += cost2.cost;
2526 cost1.complexity += cost2.complexity;
2528 return cost1;
2530 /* Subtracts cost COST2 from COST1. */
2532 static comp_cost
2533 sub_costs (comp_cost cost1, comp_cost cost2)
2535 cost1.cost -= cost2.cost;
2536 cost1.complexity -= cost2.complexity;
2538 return cost1;
2541 /* Returns a negative number if COST1 < COST2, a positive number if
2542 COST1 > COST2, and 0 if COST1 = COST2. */
2544 static int
2545 compare_costs (comp_cost cost1, comp_cost cost2)
2547 if (cost1.cost == cost2.cost)
2548 return cost1.complexity - cost2.complexity;
2550 return cost1.cost - cost2.cost;
2553 /* Returns true if COST is infinite. */
2555 static bool
2556 infinite_cost_p (comp_cost cost)
2558 return cost.cost == INFTY;
2561 /* Sets the cost of the (USE, CANDIDATE) pair to COST and records that it
2562 depends on invariants DEPENDS_ON and that the value used in expressing
2563 it is VALUE. */
2565 static void
2566 set_use_iv_cost (struct ivopts_data *data,
2567 struct iv_use *use, struct iv_cand *cand,
2568 comp_cost cost, bitmap depends_on, tree value)
2570 unsigned i, s;
2572 if (infinite_cost_p (cost))
2574 BITMAP_FREE (depends_on);
2575 return;
2578 if (data->consider_all_candidates)
2580 use->cost_map[cand->id].cand = cand;
2581 use->cost_map[cand->id].cost = cost;
2582 use->cost_map[cand->id].depends_on = depends_on;
2583 use->cost_map[cand->id].value = value;
2584 return;
2587 /* n_map_members is a power of two, so this computes modulo. */
2588 s = cand->id & (use->n_map_members - 1);
2589 for (i = s; i < use->n_map_members; i++)
2590 if (!use->cost_map[i].cand)
2591 goto found;
2592 for (i = 0; i < s; i++)
2593 if (!use->cost_map[i].cand)
2594 goto found;
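/* The two scans above are open addressing with linear probing: start
   at the hashed slot and wrap around once. Since the table has at
   least as many slots as related candidates, a free slot must exist.  */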
2596 gcc_unreachable ();
2598 found:
2599 use->cost_map[i].cand = cand;
2600 use->cost_map[i].cost = cost;
2601 use->cost_map[i].depends_on = depends_on;
2602 use->cost_map[i].value = value;
2605 /* Gets cost of (USE, CANDIDATE) pair. */
2607 static struct cost_pair *
2608 get_use_iv_cost (struct ivopts_data *data, struct iv_use *use,
2609 struct iv_cand *cand)
2611 unsigned i, s;
2612 struct cost_pair *ret;
2614 if (!cand)
2615 return NULL;
2617 if (data->consider_all_candidates)
2619 ret = use->cost_map + cand->id;
2620 if (!ret->cand)
2621 return NULL;
2623 return ret;
2626 /* n_map_members is a power of two, so this computes modulo. */
2627 s = cand->id & (use->n_map_members - 1);
2628 for (i = s; i < use->n_map_members; i++)
2629 if (use->cost_map[i].cand == cand)
2630 return use->cost_map + i;
2632 for (i = 0; i < s; i++)
2633 if (use->cost_map[i].cand == cand)
2634 return use->cost_map + i;
2636 return NULL;
2639 /* Returns an estimate of the cost of computing SEQ. */
2641 static unsigned
2642 seq_cost (rtx seq, bool speed)
2644 unsigned cost = 0;
2645 rtx set;
2647 for (; seq; seq = NEXT_INSN (seq))
2649 set = single_set (seq);
2650 if (set)
2651 cost += rtx_cost (set, SET, speed);
2652 else
2653 cost++;
2656 return cost;
2659 /* Produce DECL_RTL for object OBJ so it looks as if it is stored in memory. */
2660 static rtx
2661 produce_memory_decl_rtl (tree obj, int *regno)
2663 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (obj));
2664 enum machine_mode address_mode = targetm.addr_space.address_mode (as);
2665 rtx x;
2667 gcc_assert (obj);
2668 if (TREE_STATIC (obj) || DECL_EXTERNAL (obj))
2670 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (obj));
2671 x = gen_rtx_SYMBOL_REF (address_mode, name);
2672 SET_SYMBOL_REF_DECL (x, obj);
2673 x = gen_rtx_MEM (DECL_MODE (obj), x);
2674 set_mem_addr_space (x, as);
2675 targetm.encode_section_info (obj, x, true);
2677 else
2679 x = gen_raw_REG (address_mode, (*regno)++);
2680 x = gen_rtx_MEM (DECL_MODE (obj), x);
2681 set_mem_addr_space (x, as);
2684 return x;
2687 /* Prepares decl_rtl for variables referred to in *EXPR_P. Callback for
2688 walk_tree. DATA points to the fake register number to use next. */
2690 static tree
2691 prepare_decl_rtl (tree *expr_p, int *ws, void *data)
2693 tree obj = NULL_TREE;
2694 rtx x = NULL_RTX;
2695 int *regno = (int *) data;
2697 switch (TREE_CODE (*expr_p))
2699 case ADDR_EXPR:
2700 for (expr_p = &TREE_OPERAND (*expr_p, 0);
2701 handled_component_p (*expr_p);
2702 expr_p = &TREE_OPERAND (*expr_p, 0))
2703 continue;
2704 obj = *expr_p;
2705 if (DECL_P (obj) && !DECL_RTL_SET_P (obj))
2706 x = produce_memory_decl_rtl (obj, regno);
2707 break;
2709 case SSA_NAME:
2710 *ws = 0;
2711 obj = SSA_NAME_VAR (*expr_p);
2712 if (!DECL_RTL_SET_P (obj))
2713 x = gen_raw_REG (DECL_MODE (obj), (*regno)++);
2714 break;
2716 case VAR_DECL:
2717 case PARM_DECL:
2718 case RESULT_DECL:
2719 *ws = 0;
2720 obj = *expr_p;
2722 if (DECL_RTL_SET_P (obj))
2723 break;
2725 if (DECL_MODE (obj) == BLKmode)
2726 x = produce_memory_decl_rtl (obj, regno);
2727 else
2728 x = gen_raw_REG (DECL_MODE (obj), (*regno)++);
2730 break;
2732 default:
2733 break;
2736 if (x)
2738 VEC_safe_push (tree, heap, decl_rtl_to_reset, obj);
2739 SET_DECL_RTL (obj, x);
2742 return NULL_TREE;
2745 /* Determines cost of the computation of EXPR. */
2747 static unsigned
2748 computation_cost (tree expr, bool speed)
2750 rtx seq, rslt;
2751 tree type = TREE_TYPE (expr);
2752 unsigned cost;
2753 /* Avoid using hard regs in ways which may be unsupported. */
2754 int regno = LAST_VIRTUAL_REGISTER + 1;
2755 struct cgraph_node *node = cgraph_node (current_function_decl);
2756 enum node_frequency real_frequency = node->frequency;
2758 node->frequency = NODE_FREQUENCY_NORMAL;
2759 crtl->maybe_hot_insn_p = speed;
2760 walk_tree (&expr, prepare_decl_rtl, &regno, NULL);
2761 start_sequence ();
2762 rslt = expand_expr (expr, NULL_RTX, TYPE_MODE (type), EXPAND_NORMAL);
2763 seq = get_insns ();
2764 end_sequence ();
2765 default_rtl_profile ();
2766 node->frequency = real_frequency;
2768 cost = seq_cost (seq, speed);
2769 if (MEM_P (rslt))
2770 cost += address_cost (XEXP (rslt, 0), TYPE_MODE (type),
2771 TYPE_ADDR_SPACE (type), speed);
2773 return cost;
2776 /* Returns variable containing the value of candidate CAND at statement AT. */
2778 static tree
2779 var_at_stmt (struct loop *loop, struct iv_cand *cand, gimple stmt)
2781 if (stmt_after_increment (loop, cand, stmt))
2782 return cand->var_after;
2783 else
2784 return cand->var_before;
2787 /* Return the most significant (sign) bit of T. Similar to tree_int_cst_msb,
2788 but the bit is determined from TYPE_PRECISION, not MODE_BITSIZE. */
2790 int
2791 tree_int_cst_sign_bit (const_tree t)
2793 unsigned bitno = TYPE_PRECISION (TREE_TYPE (t)) - 1;
2794 unsigned HOST_WIDE_INT w;
2796 if (bitno < HOST_BITS_PER_WIDE_INT)
2797 w = TREE_INT_CST_LOW (t);
2798 else
2800 w = TREE_INT_CST_HIGH (t);
2801 bitno -= HOST_BITS_PER_WIDE_INT;
2804 return (w >> bitno) & 1;
2807 /* If A is (TYPE) BA and B is (TYPE) BB, and the types of BA and BB have the
2808 same precision that is at least as wide as the precision of TYPE, stores
2809 BA to A and BB to B, and returns the type of BA. Otherwise, returns the
2810 type of A and B. */
2812 static tree
2813 determine_common_wider_type (tree *a, tree *b)
2815 tree wider_type = NULL;
2816 tree suba, subb;
2817 tree atype = TREE_TYPE (*a);
2819 if (CONVERT_EXPR_P (*a))
2821 suba = TREE_OPERAND (*a, 0);
2822 wider_type = TREE_TYPE (suba);
2823 if (TYPE_PRECISION (wider_type) < TYPE_PRECISION (atype))
2824 return atype;
2826 else
2827 return atype;
2829 if (CONVERT_EXPR_P (*b))
2831 subb = TREE_OPERAND (*b, 0);
2832 if (TYPE_PRECISION (wider_type) != TYPE_PRECISION (TREE_TYPE (subb)))
2833 return atype;
2835 else
2836 return atype;
2838 *a = suba;
2839 *b = subb;
2840 return wider_type;
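/* Sketch of the intent (types assumed): if A is (unsigned int) a_64 and
   B is (unsigned int) b_64, where a_64 and b_64 are 64-bit, we strip
   both conversions and return the 64-bit type, so that A - B can fold
   in the wider type before being narrowed back.  */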
2843 /* Determines the expression by which USE is expressed from induction
2844 variable CAND at statement AT in LOOP. The expression is stored in a
2845 decomposed form in AFF. Returns false if USE cannot be expressed using CAND. */
2847 static bool
2848 get_computation_aff (struct loop *loop,
2849 struct iv_use *use, struct iv_cand *cand, gimple at,
2850 struct affine_tree_combination *aff)
2852 tree ubase = use->iv->base;
2853 tree ustep = use->iv->step;
2854 tree cbase = cand->iv->base;
2855 tree cstep = cand->iv->step, cstep_common;
2856 tree utype = TREE_TYPE (ubase), ctype = TREE_TYPE (cbase);
2857 tree common_type, var;
2858 tree uutype;
2859 aff_tree cbase_aff, var_aff;
2860 double_int rat;
2862 if (TYPE_PRECISION (utype) > TYPE_PRECISION (ctype))
2864 /* We do not have enough precision to express the values of USE. */
2865 return false;
2868 var = var_at_stmt (loop, cand, at);
2869 uutype = unsigned_type_for (utype);
2871 /* If the conversion is not noop, perform it. */
2872 if (TYPE_PRECISION (utype) < TYPE_PRECISION (ctype))
2874 cstep = fold_convert (uutype, cstep);
2875 cbase = fold_convert (uutype, cbase);
2876 var = fold_convert (uutype, var);
2879 if (!constant_multiple_of (ustep, cstep, &rat))
2880 return false;
2882 /* In case both UBASE and CBASE are shortened to UUTYPE from some common
2883 type, we achieve better folding by computing their difference in this
2884 wider type, and casting the result to UUTYPE. We do not need to worry
2885 about overflows, as all the arithmetic will in the end be performed in
2886 UUTYPE anyway. */
2887 common_type = determine_common_wider_type (&ubase, &cbase);
2889 /* use = ubase - ratio * cbase + ratio * var. */
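/* Worked example (values assumed): for use {base 16, step 8} and
   candidate {base 0, step 4}, rat == 2, so use = 16 - 2 * 0 + 2 * var;
   at iteration i, var == 4 * i and the expression yields 16 + 8 * i.  */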
2890 tree_to_aff_combination (ubase, common_type, aff);
2891 tree_to_aff_combination (cbase, common_type, &cbase_aff);
2892 tree_to_aff_combination (var, uutype, &var_aff);
2894 /* We need to shift the value if we are after the increment. */
2895 if (stmt_after_increment (loop, cand, at))
2897 aff_tree cstep_aff;
2899 if (common_type != uutype)
2900 cstep_common = fold_convert (common_type, cstep);
2901 else
2902 cstep_common = cstep;
2904 tree_to_aff_combination (cstep_common, common_type, &cstep_aff);
2905 aff_combination_add (&cbase_aff, &cstep_aff);
2908 aff_combination_scale (&cbase_aff, double_int_neg (rat));
2909 aff_combination_add (aff, &cbase_aff);
2910 if (common_type != uutype)
2911 aff_combination_convert (aff, uutype);
2913 aff_combination_scale (&var_aff, rat);
2914 aff_combination_add (aff, &var_aff);
2916 return true;
2919 /* Determines the expression by which USE is expressed from induction
2920 variable CAND at statement AT in LOOP. The computation is unshared. */
2922 static tree
2923 get_computation_at (struct loop *loop,
2924 struct iv_use *use, struct iv_cand *cand, gimple at)
2926 aff_tree aff;
2927 tree type = TREE_TYPE (use->iv->base);
2929 if (!get_computation_aff (loop, use, cand, at, &aff))
2930 return NULL_TREE;
2931 unshare_aff_combination (&aff);
2932 return fold_convert (type, aff_combination_to_tree (&aff));
2935 /* Determines the expression by which USE is expressed from induction
2936 variable CAND in LOOP. The computation is unshared. */
2938 static tree
2939 get_computation (struct loop *loop, struct iv_use *use, struct iv_cand *cand)
2941 return get_computation_at (loop, use, cand, use->stmt);
2944 /* Adjust the cost COST for being in loop setup rather than loop body.
2945 If we're optimizing for space, the loop setup overhead is constant;
2946 if we're optimizing for speed, amortize it over the per-iteration cost. */
2947 static unsigned
2948 adjust_setup_cost (struct ivopts_data *data, unsigned cost)
2950 if (cost == INFTY)
2951 return cost;
2952 else if (optimize_loop_for_speed_p (data->current_loop))
2953 return cost / AVG_LOOP_NITER (data->current_loop);
2954 else
2955 return cost;
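/* E.g. (values assumed) if AVG_LOOP_NITER is 5, a setup cost of 20
   contributes only 4 per iteration when optimizing for speed, but the
   full 20 when optimizing for size.  */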
2958 /* Returns cost of addition in MODE. */
2960 static unsigned
2961 add_cost (enum machine_mode mode, bool speed)
2963 static unsigned costs[NUM_MACHINE_MODES];
2964 rtx seq;
2965 unsigned cost;
2967 if (costs[mode])
2968 return costs[mode];
2970 start_sequence ();
2971 force_operand (gen_rtx_fmt_ee (PLUS, mode,
2972 gen_raw_REG (mode, LAST_VIRTUAL_REGISTER + 1),
2973 gen_raw_REG (mode, LAST_VIRTUAL_REGISTER + 2)),
2974 NULL_RTX);
2975 seq = get_insns ();
2976 end_sequence ();
2978 cost = seq_cost (seq, speed);
2979 if (!cost)
2980 cost = 1;
2982 costs[mode] = cost;
2984 if (dump_file && (dump_flags & TDF_DETAILS))
2985 fprintf (dump_file, "Addition in %s costs %d\n",
2986 GET_MODE_NAME (mode), cost);
2987 return cost;
2990 /* Entry in a hashtable of already known costs for multiplication. */
2991 struct mbc_entry
2993 HOST_WIDE_INT cst; /* The constant to multiply by. */
2994 enum machine_mode mode; /* In mode. */
2995 unsigned cost; /* The cost. */
2998 /* Computes the hash value for ENTRY. */
3000 static hashval_t
3001 mbc_entry_hash (const void *entry)
3003 const struct mbc_entry *e = (const struct mbc_entry *) entry;
3005 return 57 * (hashval_t) e->mode + (hashval_t) (e->cst % 877);
3008 /* Compares the hash table entries ENTRY1 and ENTRY2. */
3010 static int
3011 mbc_entry_eq (const void *entry1, const void *entry2)
3013 const struct mbc_entry *e1 = (const struct mbc_entry *) entry1;
3014 const struct mbc_entry *e2 = (const struct mbc_entry *) entry2;
3016 return (e1->mode == e2->mode
3017 && e1->cst == e2->cst);
3020 /* Returns cost of multiplication by constant CST in MODE. */
3022 unsigned
3023 multiply_by_cost (HOST_WIDE_INT cst, enum machine_mode mode, bool speed)
3025 static htab_t costs;
3026 struct mbc_entry **cached, act;
3027 rtx seq;
3028 unsigned cost;
3030 if (!costs)
3031 costs = htab_create (100, mbc_entry_hash, mbc_entry_eq, free);
3033 act.mode = mode;
3034 act.cst = cst;
3035 cached = (struct mbc_entry **) htab_find_slot (costs, &act, INSERT);
3036 if (*cached)
3037 return (*cached)->cost;
3039 *cached = XNEW (struct mbc_entry);
3040 (*cached)->mode = mode;
3041 (*cached)->cst = cst;
3043 start_sequence ();
3044 expand_mult (mode, gen_raw_REG (mode, LAST_VIRTUAL_REGISTER + 1),
3045 gen_int_mode (cst, mode), NULL_RTX, 0);
3046 seq = get_insns ();
3047 end_sequence ();
3049 cost = seq_cost (seq, speed);
3051 if (dump_file && (dump_flags & TDF_DETAILS))
3052 fprintf (dump_file, "Multiplication by %d in %s costs %d\n",
3053 (int) cst, GET_MODE_NAME (mode), cost);
3055 (*cached)->cost = cost;
3057 return cost;
3060 /* Returns true if multiplying by RATIO is allowed in an address. Test the
3061 validity for a memory reference accessing memory of mode MODE in
3062 address space AS. */
3064 DEF_VEC_P (sbitmap);
3065 DEF_VEC_ALLOC_P (sbitmap, heap);
3067 bool
3068 multiplier_allowed_in_address_p (HOST_WIDE_INT ratio, enum machine_mode mode,
3069 addr_space_t as)
3071 #define MAX_RATIO 128
3072 unsigned int data_index = (int) as * MAX_MACHINE_MODE + (int) mode;
3073 static VEC (sbitmap, heap) *valid_mult_list;
3074 sbitmap valid_mult;
3076 if (data_index >= VEC_length (sbitmap, valid_mult_list))
3077 VEC_safe_grow_cleared (sbitmap, heap, valid_mult_list, data_index + 1);
3079 valid_mult = VEC_index (sbitmap, valid_mult_list, data_index);
3080 if (!valid_mult)
3082 enum machine_mode address_mode = targetm.addr_space.address_mode (as);
3083 rtx reg1 = gen_raw_REG (address_mode, LAST_VIRTUAL_REGISTER + 1);
3084 rtx addr;
3085 HOST_WIDE_INT i;
3087 valid_mult = sbitmap_alloc (2 * MAX_RATIO + 1);
3088 sbitmap_zero (valid_mult);
3089 addr = gen_rtx_fmt_ee (MULT, address_mode, reg1, NULL_RTX);
3090 for (i = -MAX_RATIO; i <= MAX_RATIO; i++)
3092 XEXP (addr, 1) = gen_int_mode (i, address_mode);
3093 if (memory_address_addr_space_p (mode, addr, as))
3094 SET_BIT (valid_mult, i + MAX_RATIO);
3097 if (dump_file && (dump_flags & TDF_DETAILS))
3099 fprintf (dump_file, " allowed multipliers:");
3100 for (i = -MAX_RATIO; i <= MAX_RATIO; i++)
3101 if (TEST_BIT (valid_mult, i + MAX_RATIO))
3102 fprintf (dump_file, " %d", (int) i);
3103 fprintf (dump_file, "\n");
3104 fprintf (dump_file, "\n");
3107 VEC_replace (sbitmap, valid_mult_list, data_index, valid_mult);
3110 if (ratio > MAX_RATIO || ratio < -MAX_RATIO)
3111 return false;
3113 return TEST_BIT (valid_mult, ratio + MAX_RATIO);
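/* Illustration (target-dependent, assumed here): on an i386-like target
   only the index scales 1, 2, 4 and 8 are valid, so just those bits end
   up set in the cached bitmap and e.g. ratio == 3 is rejected.  */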
3116 /* Returns cost of address in shape symbol + var + OFFSET + RATIO * index.
3117 If SYMBOL_PRESENT is false, symbol is omitted. If VAR_PRESENT is false,
3118 variable is omitted. Compute the cost for a memory reference that accesses
3119 a memory location of mode MEM_MODE in address space AS.
3121 MAY_AUTOINC is set to true if the autoincrement (increasing index by
3122 size of MEM_MODE / RATIO) is available. To make this determination, we
3123 look at the size of the increment to be made, which is given in CSTEP.
3124 CSTEP may be zero if the step is unknown.
3125 STMT_AFTER_INC is true iff the statement we're looking at is after the
3126 increment of the original biv.
3128 TODO -- there must be some better way. This all is quite crude. */
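/* The table below caches these costs: data->costs[s][v][o][r] is the
   cost of an address with the symbol part present iff s, the variable
   part iff v, the offset iff o and the ratio iff r; e.g.
   costs[1][1][1][1] is the fully general "sym + var + cst + rat * index"
   shape.  */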
3130 typedef struct
3132 HOST_WIDE_INT min_offset, max_offset;
3133 unsigned costs[2][2][2][2];
3134 } *address_cost_data;
3136 DEF_VEC_P (address_cost_data);
3137 DEF_VEC_ALLOC_P (address_cost_data, heap);
3139 static comp_cost
3140 get_address_cost (bool symbol_present, bool var_present,
3141 unsigned HOST_WIDE_INT offset, HOST_WIDE_INT ratio,
3142 HOST_WIDE_INT cstep, enum machine_mode mem_mode,
3143 addr_space_t as, bool speed,
3144 bool stmt_after_inc, bool *may_autoinc)
3146 enum machine_mode address_mode = targetm.addr_space.address_mode (as);
3147 static VEC(address_cost_data, heap) *address_cost_data_list;
3148 unsigned int data_index = (int) as * MAX_MACHINE_MODE + (int) mem_mode;
3149 address_cost_data data;
3150 static bool has_preinc[MAX_MACHINE_MODE], has_postinc[MAX_MACHINE_MODE];
3151 static bool has_predec[MAX_MACHINE_MODE], has_postdec[MAX_MACHINE_MODE];
3152 unsigned cost, acost, complexity;
3153 bool offset_p, ratio_p, autoinc;
3154 HOST_WIDE_INT s_offset, autoinc_offset, msize;
3155 unsigned HOST_WIDE_INT mask;
3156 unsigned bits;
3158 if (data_index >= VEC_length (address_cost_data, address_cost_data_list))
3159 VEC_safe_grow_cleared (address_cost_data, heap, address_cost_data_list,
3160 data_index + 1);
3162 data = VEC_index (address_cost_data, address_cost_data_list, data_index);
3163 if (!data)
3165 HOST_WIDE_INT i;
3166 HOST_WIDE_INT start = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
3167 HOST_WIDE_INT rat, off;
3168 int old_cse_not_expected;
3169 unsigned sym_p, var_p, off_p, rat_p, add_c;
3170 rtx seq, addr, base;
3171 rtx reg0, reg1;
3173 data = (address_cost_data) xcalloc (1, sizeof (*data));
3175 reg1 = gen_raw_REG (address_mode, LAST_VIRTUAL_REGISTER + 1);
3177 addr = gen_rtx_fmt_ee (PLUS, address_mode, reg1, NULL_RTX);
3178 for (i = start; i <= 1 << 20; i <<= 1)
3180 XEXP (addr, 1) = gen_int_mode (i, address_mode);
3181 if (!memory_address_addr_space_p (mem_mode, addr, as))
3182 break;
3184 data->max_offset = i == start ? 0 : i >> 1;
3185 off = data->max_offset;
3187 for (i = start; i <= 1 << 20; i <<= 1)
3189 XEXP (addr, 1) = gen_int_mode (-i, address_mode);
3190 if (!memory_address_addr_space_p (mem_mode, addr, as))
3191 break;
3193 data->min_offset = i == start ? 0 : -(i >> 1);
3195 if (dump_file && (dump_flags & TDF_DETAILS))
3197 fprintf (dump_file, "get_address_cost:\n");
3198 fprintf (dump_file, " min offset %s %d\n",
3199 GET_MODE_NAME (mem_mode),
3200 (int) data->min_offset);
3201 fprintf (dump_file, " max offset %s %d\n",
3202 GET_MODE_NAME (mem_mode),
3203 (int) data->max_offset);
3206 rat = 1;
3207 for (i = 2; i <= MAX_RATIO; i++)
3208 if (multiplier_allowed_in_address_p (i, mem_mode, as))
3210 rat = i;
3211 break;
3214 /* Compute the cost of various addressing modes. */
3215 acost = 0;
3216 reg0 = gen_raw_REG (address_mode, LAST_VIRTUAL_REGISTER + 1);
3217 reg1 = gen_raw_REG (address_mode, LAST_VIRTUAL_REGISTER + 2);
3219 if (HAVE_PRE_DECREMENT)
3221 addr = gen_rtx_PRE_DEC (address_mode, reg0);
3222 has_predec[mem_mode]
3223 = memory_address_addr_space_p (mem_mode, addr, as);
3225 if (HAVE_POST_DECREMENT)
3227 addr = gen_rtx_POST_DEC (address_mode, reg0);
3228 has_postdec[mem_mode]
3229 = memory_address_addr_space_p (mem_mode, addr, as);
3231 if (HAVE_PRE_INCREMENT)
3233 addr = gen_rtx_PRE_INC (address_mode, reg0);
3234 has_preinc[mem_mode]
3235 = memory_address_addr_space_p (mem_mode, addr, as);
3237 if (HAVE_POST_INCREMENT)
3239 addr = gen_rtx_POST_INC (address_mode, reg0);
3240 has_postinc[mem_mode]
3241 = memory_address_addr_space_p (mem_mode, addr, as);
3243 for (i = 0; i < 16; i++)
3245 sym_p = i & 1;
3246 var_p = (i >> 1) & 1;
3247 off_p = (i >> 2) & 1;
3248 rat_p = (i >> 3) & 1;
3250 addr = reg0;
3251 if (rat_p)
3252 addr = gen_rtx_fmt_ee (MULT, address_mode, addr,
3253 gen_int_mode (rat, address_mode));
3255 if (var_p)
3256 addr = gen_rtx_fmt_ee (PLUS, address_mode, addr, reg1);
3258 if (sym_p)
3260 base = gen_rtx_SYMBOL_REF (address_mode, ggc_strdup (""));
3261 /* ??? We can run into trouble with some backends by presenting
3262 it with symbols which haven't been properly passed through
3263 targetm.encode_section_info. By setting the local bit, we
3264 enhance the probability of things working. */
3265 SYMBOL_REF_FLAGS (base) = SYMBOL_FLAG_LOCAL;
3267 if (off_p)
3268 base = gen_rtx_fmt_e (CONST, address_mode,
3269 gen_rtx_fmt_ee
3270 (PLUS, address_mode, base,
3271 gen_int_mode (off, address_mode)));
3273 else if (off_p)
3274 base = gen_int_mode (off, address_mode);
3275 else
3276 base = NULL_RTX;
3278 if (base)
3279 addr = gen_rtx_fmt_ee (PLUS, address_mode, addr, base);
3281 start_sequence ();
3282 /* To avoid splitting addressing modes, pretend that no cse will
3283 follow. */
3284 old_cse_not_expected = cse_not_expected;
3285 cse_not_expected = true;
3286 addr = memory_address_addr_space (mem_mode, addr, as);
3287 cse_not_expected = old_cse_not_expected;
3288 seq = get_insns ();
3289 end_sequence ();
3291 acost = seq_cost (seq, speed);
3292 acost += address_cost (addr, mem_mode, as, speed);
3294 if (!acost)
3295 acost = 1;
3296 data->costs[sym_p][var_p][off_p][rat_p] = acost;
3299 /* On some targets, it is quite expensive to load a symbol into a register,
3300 which makes addresses that contain symbols look much more expensive.
3301 However, the symbol will have to be loaded in any case before the
3302 loop (and quite likely we have it in a register already), so it does
3303 not make much sense to penalize them too heavily. Hence make some
3304 final tweaks for the SYMBOL_PRESENT modes:
3306 If VAR_PRESENT is false, and the mode obtained by changing symbol to
3307 var is cheaper, use this mode with a small penalty.
3308 If VAR_PRESENT is true, check whether the mode with
3309 SYMBOL_PRESENT = false is cheaper even with the cost of the addition,
3310 and if this is the case, use it. */
3311 add_c = add_cost (address_mode, speed);
3312 for (i = 0; i < 8; i++)
3314 var_p = i & 1;
3315 off_p = (i >> 1) & 1;
3316 rat_p = (i >> 2) & 1;
3318 acost = data->costs[0][1][off_p][rat_p] + 1;
3319 if (var_p)
3320 acost += add_c;
3322 if (acost < data->costs[1][var_p][off_p][rat_p])
3323 data->costs[1][var_p][off_p][rat_p] = acost;
3326 if (dump_file && (dump_flags & TDF_DETAILS))
3328 fprintf (dump_file, "Address costs:\n");
3330 for (i = 0; i < 16; i++)
3332 sym_p = i & 1;
3333 var_p = (i >> 1) & 1;
3334 off_p = (i >> 2) & 1;
3335 rat_p = (i >> 3) & 1;
3337 fprintf (dump_file, " ");
3338 if (sym_p)
3339 fprintf (dump_file, "sym + ");
3340 if (var_p)
3341 fprintf (dump_file, "var + ");
3342 if (off_p)
3343 fprintf (dump_file, "cst + ");
3344 if (rat_p)
3345 fprintf (dump_file, "rat * ");
3347 acost = data->costs[sym_p][var_p][off_p][rat_p];
3348 fprintf (dump_file, "index costs %d\n", acost);
3350 if (has_predec[mem_mode] || has_postdec[mem_mode]
3351 || has_preinc[mem_mode] || has_postinc[mem_mode])
3352 fprintf (dump_file, " May include autoinc/dec\n");
3353 fprintf (dump_file, "\n");
3356 VEC_replace (address_cost_data, address_cost_data_list,
3357 data_index, data);
3360 bits = GET_MODE_BITSIZE (address_mode);
3361 mask = ~(~(unsigned HOST_WIDE_INT) 0 << (bits - 1) << 1);
3362 offset &= mask;
3363 if ((offset >> (bits - 1) & 1))
3364 offset |= ~mask;
3365 s_offset = offset;
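/* The masking above sign-extends OFFSET to the width of the address
   mode: e.g. with 32-bit addresses, an incoming offset of 0xfffffff0
   has bit 31 set and therefore becomes the signed offset -16 in
   S_OFFSET.  */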
3367 autoinc = false;
3368 msize = GET_MODE_SIZE (mem_mode);
3369 autoinc_offset = offset;
3370 if (stmt_after_inc)
3371 autoinc_offset += ratio * cstep;
3372 if (symbol_present || var_present || ratio != 1)
3373 autoinc = false;
3374 else if ((has_postinc[mem_mode] && autoinc_offset == 0
3375 && msize == cstep)
3376 || (has_postdec[mem_mode] && autoinc_offset == 0
3377 && msize == -cstep)
3378 || (has_preinc[mem_mode] && autoinc_offset == msize
3379 && msize == cstep)
3380 || (has_predec[mem_mode] && autoinc_offset == -msize
3381 && msize == -cstep))
3382 autoinc = true;
3384 cost = 0;
3385 offset_p = (s_offset != 0
3386 && data->min_offset <= s_offset
3387 && s_offset <= data->max_offset);
3388 ratio_p = (ratio != 1
3389 && multiplier_allowed_in_address_p (ratio, mem_mode, as));
3391 if (ratio != 1 && !ratio_p)
3392 cost += multiply_by_cost (ratio, address_mode, speed);
3394 if (s_offset && !offset_p && !symbol_present)
3395 cost += add_cost (address_mode, speed);
3397 if (may_autoinc)
3398 *may_autoinc = autoinc;
3399 acost = data->costs[symbol_present][var_present][offset_p][ratio_p];
3400 complexity = (symbol_present != 0) + (var_present != 0) + offset_p + ratio_p;
3401 return new_cost (cost + acost, complexity);
3404 /* Estimates cost of forcing expression EXPR into a variable. */
3406 static comp_cost
3407 force_expr_to_var_cost (tree expr, bool speed)
3409 static bool costs_initialized = false;
3410 static unsigned integer_cost [2];
3411 static unsigned symbol_cost [2];
3412 static unsigned address_cost [2];
3413 tree op0, op1;
3414 comp_cost cost0, cost1, cost;
3415 enum machine_mode mode;
3417 if (!costs_initialized)
3419 tree type = build_pointer_type (integer_type_node);
3420 tree var, addr;
3421 rtx x;
3422 int i;
3424 var = create_tmp_var_raw (integer_type_node, "test_var");
3425 TREE_STATIC (var) = 1;
3426 x = produce_memory_decl_rtl (var, NULL);
3427 SET_DECL_RTL (var, x);
3429 addr = build1 (ADDR_EXPR, type, var);
3432 for (i = 0; i < 2; i++)
3434 integer_cost[i] = computation_cost (build_int_cst (integer_type_node,
3435 2000), i);
3437 symbol_cost[i] = computation_cost (addr, i) + 1;
3439 address_cost[i]
3440 = computation_cost (build2 (POINTER_PLUS_EXPR, type,
3441 addr,
3442 build_int_cst (sizetype, 2000)), i) + 1;
3443 if (dump_file && (dump_flags & TDF_DETAILS))
3445 fprintf (dump_file, "force_expr_to_var_cost %s costs:\n", i ? "speed" : "size");
3446 fprintf (dump_file, " integer %d\n", (int) integer_cost[i]);
3447 fprintf (dump_file, " symbol %d\n", (int) symbol_cost[i]);
3448 fprintf (dump_file, " address %d\n", (int) address_cost[i]);
3449 fprintf (dump_file, " other %d\n", (int) target_spill_cost[i]);
3450 fprintf (dump_file, "\n");
3454 costs_initialized = true;
3457 STRIP_NOPS (expr);
3459 if (SSA_VAR_P (expr))
3460 return zero_cost;
3462 if (is_gimple_min_invariant (expr))
3464 if (TREE_CODE (expr) == INTEGER_CST)
3465 return new_cost (integer_cost [speed], 0);
3467 if (TREE_CODE (expr) == ADDR_EXPR)
3469 tree obj = TREE_OPERAND (expr, 0);
3471 if (TREE_CODE (obj) == VAR_DECL
3472 || TREE_CODE (obj) == PARM_DECL
3473 || TREE_CODE (obj) == RESULT_DECL)
3474 return new_cost (symbol_cost [speed], 0);
3477 return new_cost (address_cost [speed], 0);
3480 switch (TREE_CODE (expr))
3482 case POINTER_PLUS_EXPR:
3483 case PLUS_EXPR:
3484 case MINUS_EXPR:
3485 case MULT_EXPR:
3486 op0 = TREE_OPERAND (expr, 0);
3487 op1 = TREE_OPERAND (expr, 1);
3488 STRIP_NOPS (op0);
3489 STRIP_NOPS (op1);
3491 if (is_gimple_val (op0))
3492 cost0 = zero_cost;
3493 else
3494 cost0 = force_expr_to_var_cost (op0, speed);
3496 if (is_gimple_val (op1))
3497 cost1 = zero_cost;
3498 else
3499 cost1 = force_expr_to_var_cost (op1, speed);
3501 break;
3503 case NEGATE_EXPR:
3504 op0 = TREE_OPERAND (expr, 0);
3505 STRIP_NOPS (op0);
3506 op1 = NULL_TREE;
3508 if (is_gimple_val (op0))
3509 cost0 = zero_cost;
3510 else
3511 cost0 = force_expr_to_var_cost (op0, speed);
3513 cost1 = zero_cost;
3514 break;
3516 default:
3517 /* Just an arbitrary value, FIXME. */
3518 return new_cost (target_spill_cost[speed], 0);
3521 mode = TYPE_MODE (TREE_TYPE (expr));
3522 switch (TREE_CODE (expr))
3524 case POINTER_PLUS_EXPR:
3525 case PLUS_EXPR:
3526 case MINUS_EXPR:
3527 case NEGATE_EXPR:
3528 cost = new_cost (add_cost (mode, speed), 0);
3529 break;
3531 case MULT_EXPR:
3532 if (cst_and_fits_in_hwi (op0))
3533 cost = new_cost (multiply_by_cost (int_cst_value (op0), mode, speed), 0);
3534 else if (cst_and_fits_in_hwi (op1))
3535 cost = new_cost (multiply_by_cost (int_cst_value (op1), mode, speed), 0);
3536 else
3537 return new_cost (target_spill_cost [speed], 0);
3538 break;
3540 default:
3541 gcc_unreachable ();
3544 cost = add_costs (cost, cost0);
3545 cost = add_costs (cost, cost1);
3547 /* Bound the cost by target_spill_cost. The parts of complicated
3548 computations often are either loop invariant or at least can
3549 be shared between several iv uses, so letting this grow without
3550 limits would not give reasonable results. */
3551 if (cost.cost > (int) target_spill_cost [speed])
3552 cost.cost = target_spill_cost [speed];
3554 return cost;
3557 /* Estimates cost of forcing EXPR into a variable. DEPENDS_ON is a set of the
3558 invariants the computation depends on. */
3560 static comp_cost
3561 force_var_cost (struct ivopts_data *data,
3562 tree expr, bitmap *depends_on)
3564 if (depends_on)
3566 fd_ivopts_data = data;
3567 walk_tree (&expr, find_depends, depends_on, NULL);
3570 return force_expr_to_var_cost (expr, data->speed);
3573 /* Estimates cost of expressing address ADDR as var + symbol + offset. The
3574 value of offset is added to OFFSET, SYMBOL_PRESENT and VAR_PRESENT are set
3575 to false if the corresponding part is missing. DEPENDS_ON is a set of the
3576 invariants the computation depends on. */
3578 static comp_cost
3579 split_address_cost (struct ivopts_data *data,
3580 tree addr, bool *symbol_present, bool *var_present,
3581 unsigned HOST_WIDE_INT *offset, bitmap *depends_on)
3583 tree core;
3584 HOST_WIDE_INT bitsize;
3585 HOST_WIDE_INT bitpos;
3586 tree toffset;
3587 enum machine_mode mode;
3588 int unsignedp, volatilep;
3590 core = get_inner_reference (addr, &bitsize, &bitpos, &toffset, &mode,
3591 &unsignedp, &volatilep, false);
3593 if (toffset != 0
3594 || bitpos % BITS_PER_UNIT != 0
3595 || TREE_CODE (core) != VAR_DECL)
3597 *symbol_present = false;
3598 *var_present = true;
3599 fd_ivopts_data = data;
3600 walk_tree (&addr, find_depends, depends_on, NULL);
3601 return new_cost (target_spill_cost[data->speed], 0);
3604 *offset += bitpos / BITS_PER_UNIT;
3605 if (TREE_STATIC (core)
3606 || DECL_EXTERNAL (core))
3608 *symbol_present = true;
3609 *var_present = false;
3610 return zero_cost;
3613 *symbol_present = false;
3614 *var_present = true;
3615 return zero_cost;
3618 /* Estimates cost of expressing difference of addresses E1 - E2 as
3619 var + symbol + offset. The value of offset is added to OFFSET,
3620 SYMBOL_PRESENT and VAR_PRESENT are set to false if the corresponding
3621 part is missing. DEPENDS_ON is a set of the invariants the computation
3622 depends on. */
3624 static comp_cost
3625 ptr_difference_cost (struct ivopts_data *data,
3626 tree e1, tree e2, bool *symbol_present, bool *var_present,
3627 unsigned HOST_WIDE_INT *offset, bitmap *depends_on)
3629 HOST_WIDE_INT diff = 0;
3630 aff_tree aff_e1, aff_e2;
3631 tree type;
3633 gcc_assert (TREE_CODE (e1) == ADDR_EXPR);
3635 if (ptr_difference_const (e1, e2, &diff))
3637 *offset += diff;
3638 *symbol_present = false;
3639 *var_present = false;
3640 return zero_cost;
3643 if (integer_zerop (e2))
3644 return split_address_cost (data, TREE_OPERAND (e1, 0),
3645 symbol_present, var_present, offset, depends_on);
3647 *symbol_present = false;
3648 *var_present = true;
3650 type = signed_type_for (TREE_TYPE (e1));
3651 tree_to_aff_combination (e1, type, &aff_e1);
3652 tree_to_aff_combination (e2, type, &aff_e2);
3653 aff_combination_scale (&aff_e2, double_int_minus_one);
3654 aff_combination_add (&aff_e1, &aff_e2);
3656 return force_var_cost (data, aff_combination_to_tree (&aff_e1), depends_on);
3659 /* Estimates cost of expressing difference E1 - E2 as
3660 var + symbol + offset. The value of offset is added to OFFSET,
3661 SYMBOL_PRESENT and VAR_PRESENT are set to false if the corresponding
3662 part is missing. DEPENDS_ON is a set of the invariants the computation
3663 depends on. */
3665 static comp_cost
3666 difference_cost (struct ivopts_data *data,
3667 tree e1, tree e2, bool *symbol_present, bool *var_present,
3668 unsigned HOST_WIDE_INT *offset, bitmap *depends_on)
3670 enum machine_mode mode = TYPE_MODE (TREE_TYPE (e1));
3671 unsigned HOST_WIDE_INT off1, off2;
3672 aff_tree aff_e1, aff_e2;
3673 tree type;
3675 e1 = strip_offset (e1, &off1);
3676 e2 = strip_offset (e2, &off2);
3677 *offset += off1 - off2;
3679 STRIP_NOPS (e1);
3680 STRIP_NOPS (e2);
3682 if (TREE_CODE (e1) == ADDR_EXPR)
3683 return ptr_difference_cost (data, e1, e2, symbol_present, var_present,
3684 offset, depends_on);
3685 *symbol_present = false;
3687 if (operand_equal_p (e1, e2, 0))
3689 *var_present = false;
3690 return zero_cost;
3693 *var_present = true;
3695 if (integer_zerop (e2))
3696 return force_var_cost (data, e1, depends_on);
3698 if (integer_zerop (e1))
3700 comp_cost cost = force_var_cost (data, e2, depends_on);
3701 cost.cost += multiply_by_cost (-1, mode, data->speed);
3702 return cost;
3705 type = signed_type_for (TREE_TYPE (e1));
3706 tree_to_aff_combination (e1, type, &aff_e1);
3707 tree_to_aff_combination (e2, type, &aff_e2);
3708 aff_combination_scale (&aff_e2, double_int_minus_one);
3709 aff_combination_add (&aff_e1, &aff_e2);
3711 return force_var_cost (data, aff_combination_to_tree (&aff_e1), depends_on);
3714 /* Determines the cost of the computation by which USE is expressed
3715 from induction variable CAND. If ADDRESS_P is true, we just need
3716 to create an address from it, otherwise we want to get it into a
3717 register. A set of invariants we depend on is stored in
3718 DEPENDS_ON. AT is the statement at which the value is computed.
3719 If CAN_AUTOINC is nonnull, use it to record whether autoinc
3720 addressing is likely. */
3722 static comp_cost
3723 get_computation_cost_at (struct ivopts_data *data,
3724 struct iv_use *use, struct iv_cand *cand,
3725 bool address_p, bitmap *depends_on, gimple at,
3726 bool *can_autoinc)
3728 tree ubase = use->iv->base, ustep = use->iv->step;
3729 tree cbase, cstep;
3730 tree utype = TREE_TYPE (ubase), ctype;
3731 unsigned HOST_WIDE_INT cstepi, offset = 0;
3732 HOST_WIDE_INT ratio, aratio;
3733 bool var_present, symbol_present, stmt_is_after_inc;
3734 comp_cost cost;
3735 double_int rat;
3736 bool speed = optimize_bb_for_speed_p (gimple_bb (at));
3738 *depends_on = NULL;
3740 /* Only consider real candidates. */
3741 if (!cand->iv)
3742 return infinite_cost;
3744 cbase = cand->iv->base;
3745 cstep = cand->iv->step;
3746 ctype = TREE_TYPE (cbase);
3748 if (TYPE_PRECISION (utype) > TYPE_PRECISION (ctype))
3750 /* We do not have enough precision to express the values of USE. */
3751 return infinite_cost;
3754 if (address_p)
3756 /* Do not try to express the address of an object with a computation
3757 based on the address of a different object. This may cause problems
3758 in rtl-level alias analysis (which does not expect this to happen,
3759 as it is illegal in C), and would be unlikely to be useful
3760 anyway. */
3761 if (use->iv->base_object
3762 && cand->iv->base_object
3763 && !operand_equal_p (use->iv->base_object, cand->iv->base_object, 0))
3764 return infinite_cost;
3767 if (TYPE_PRECISION (utype) < TYPE_PRECISION (ctype))
3769 /* TODO -- add direct handling of this case. */
3770 goto fallback;
3773 /* CSTEPI is removed from the offset in case the statement is after the
3774 increment. If the step is not constant, we use zero instead.
3775 This is a bit imprecise (there is the extra addition), but
3776 redundancy elimination is likely to transform the code so that
3777 it uses the value of the variable before the increment anyway,
3778 so this is not very unrealistic. */
3779 if (cst_and_fits_in_hwi (cstep))
3780 cstepi = int_cst_value (cstep);
3781 else
3782 cstepi = 0;
3784 if (!constant_multiple_of (ustep, cstep, &rat))
3785 return infinite_cost;
3787 if (double_int_fits_in_shwi_p (rat))
3788 ratio = double_int_to_shwi (rat);
3789 else
3790 return infinite_cost;
3792 STRIP_NOPS (cbase);
3793 ctype = TREE_TYPE (cbase);
3795 /* use = ubase + ratio * (var - cbase). If either cbase is a constant
3796 or ratio == 1, it is better to handle this like
3798 ubase - ratio * cbase + ratio * var
3800 (this also holds in the case ratio == -1; TODO). */
3802 if (cst_and_fits_in_hwi (cbase))
3804 offset = - ratio * int_cst_value (cbase);
3805 cost = difference_cost (data,
3806 ubase, build_int_cst (utype, 0),
3807 &symbol_present, &var_present, &offset,
3808 depends_on);
3810 else if (ratio == 1)
3812 cost = difference_cost (data,
3813 ubase, cbase,
3814 &symbol_present, &var_present, &offset,
3815 depends_on);
3817 else if (address_p
3818 && !POINTER_TYPE_P (ctype)
3819 && multiplier_allowed_in_address_p
3820 (ratio, TYPE_MODE (TREE_TYPE (utype)),
3821 TYPE_ADDR_SPACE (TREE_TYPE (utype))))
3823 cbase
3824 = fold_build2 (MULT_EXPR, ctype, cbase, build_int_cst (ctype, ratio));
3825 cost = difference_cost (data,
3826 ubase, cbase,
3827 &symbol_present, &var_present, &offset,
3828 depends_on);
3830 else
3832 cost = force_var_cost (data, cbase, depends_on);
3833 cost.cost += add_cost (TYPE_MODE (ctype), data->speed);
3834 cost = add_costs (cost,
3835 difference_cost (data,
3836 ubase, build_int_cst (utype, 0),
3837 &symbol_present, &var_present,
3838 &offset, depends_on));
3841 /* If we are after the increment, the value of the candidate is higher by
3842 one iteration. */
3843 stmt_is_after_inc = stmt_after_increment (data->current_loop, cand, at);
3844 if (stmt_is_after_inc)
3845 offset -= ratio * cstepi;
3847 /* Now the computation is in shape symbol + var1 + const + ratio * var2.
3848 (symbol/var1/const parts may be omitted). If we are looking for an
3849 address, find the cost of addressing this. */
3850 if (address_p)
3851 return add_costs (cost,
3852 get_address_cost (symbol_present, var_present,
3853 offset, ratio, cstepi,
3854 TYPE_MODE (TREE_TYPE (utype)),
3855 TYPE_ADDR_SPACE (TREE_TYPE (utype)),
3856 speed, stmt_is_after_inc,
3857 can_autoinc));
3859 /* Otherwise estimate the costs for computing the expression. */
3860 if (!symbol_present && !var_present && !offset)
3862 if (ratio != 1)
3863 cost.cost += multiply_by_cost (ratio, TYPE_MODE (ctype), speed);
3864 return cost;
3867 /* Symbol + offset should be compile-time computable, so consider that
3868 they are added once to the variable, if present. */
3869 if (var_present && (symbol_present || offset))
3870 cost.cost += adjust_setup_cost (data,
3871 add_cost (TYPE_MODE (ctype), speed));
3873 /* Having offset does not affect runtime cost in case it is added to
3874 symbol, but it increases complexity. */
3875 if (offset)
3876 cost.complexity++;
3878 cost.cost += add_cost (TYPE_MODE (ctype), speed);
3880 aratio = ratio > 0 ? ratio : -ratio;
3881 if (aratio != 1)
3882 cost.cost += multiply_by_cost (aratio, TYPE_MODE (ctype), speed);
3883 return cost;
3885 fallback:
3886 if (can_autoinc)
3887 *can_autoinc = false;
3890 /* Just get the expression, expand it and measure the cost. */
3891 tree comp = get_computation_at (data->current_loop, use, cand, at);
3893 if (!comp)
3894 return infinite_cost;
3896 if (address_p)
3897 comp = build_simple_mem_ref (comp);
3899 return new_cost (computation_cost (comp, speed), 0);
3903 /* Determines the cost of the computation by which USE is expressed
3904 from induction variable CAND. If ADDRESS_P is true, we just need
3905 to create an address from it, otherwise we want to get it into a
3906 register. A set of invariants we depend on is stored in
3907 DEPENDS_ON. If CAN_AUTOINC is nonnull, use it to record whether
3908 autoinc addressing is likely. */
3910 static comp_cost
3911 get_computation_cost (struct ivopts_data *data,
3912 struct iv_use *use, struct iv_cand *cand,
3913 bool address_p, bitmap *depends_on, bool *can_autoinc)
3915 return get_computation_cost_at (data,
3916 use, cand, address_p, depends_on, use->stmt,
3917 can_autoinc);
3920 /* Determines cost of basing replacement of USE on CAND in a generic
3921 expression. */
3923 static bool
3924 determine_use_iv_cost_generic (struct ivopts_data *data,
3925 struct iv_use *use, struct iv_cand *cand)
3927 bitmap depends_on;
3928 comp_cost cost;
3930 /* The simple case first -- if we need to express the value of the
3931 preserved original biv, the cost is 0. This also prevents us from
3932 counting the cost of the increment twice -- once at this use and once
3933 in the cost of the candidate. */
3934 if (cand->pos == IP_ORIGINAL
3935 && cand->incremented_at == use->stmt)
3937 set_use_iv_cost (data, use, cand, zero_cost, NULL, NULL_TREE);
3938 return true;
3941 cost = get_computation_cost (data, use, cand, false, &depends_on, NULL);
3942 set_use_iv_cost (data, use, cand, cost, depends_on, NULL_TREE);
3944 return !infinite_cost_p (cost);
3947 /* Determines cost of basing replacement of USE on CAND in an address. */
3949 static bool
3950 determine_use_iv_cost_address (struct ivopts_data *data,
3951 struct iv_use *use, struct iv_cand *cand)
3953 bitmap depends_on;
3954 bool can_autoinc;
3955 comp_cost cost = get_computation_cost (data, use, cand, true, &depends_on,
3956 &can_autoinc);
3958 if (cand->ainc_use == use)
3960 if (can_autoinc)
3961 cost.cost -= cand->cost_step;
3962 /* If we generated the candidate solely for exploiting autoincrement
3963 opportunities, and it turns out it can't be used, set the cost to
3964 infinity to make sure we ignore it. */
3965 else if (cand->pos == IP_AFTER_USE || cand->pos == IP_BEFORE_USE)
3966 cost = infinite_cost;
3968 set_use_iv_cost (data, use, cand, cost, depends_on, NULL_TREE);
3970 return !infinite_cost_p (cost);
3973 /* Computes value of candidate CAND at position AT in iteration NITER, and
3974 stores it to VAL. */
3976 static void
3977 cand_value_at (struct loop *loop, struct iv_cand *cand, gimple at, tree niter,
3978 aff_tree *val)
3980 aff_tree step, delta, nit;
3981 struct iv *iv = cand->iv;
3982 tree type = TREE_TYPE (iv->base);
3983 tree steptype = type;
3984 if (POINTER_TYPE_P (type))
3985 steptype = sizetype;
3987 tree_to_aff_combination (iv->step, steptype, &step);
3988 tree_to_aff_combination (niter, TREE_TYPE (niter), &nit);
3989 aff_combination_convert (&nit, steptype);
3990 aff_combination_mult (&nit, &step, &delta);
3991 if (stmt_after_increment (loop, cand, at))
3992 aff_combination_add (&delta, &step);
3994 tree_to_aff_combination (iv->base, type, val);
3995 aff_combination_add (val, &delta);
4000 /* Returns the period of induction variable IV. */
4000 static tree
4001 iv_period (struct iv *iv)
4003 tree step = iv->step, period, type;
4004 tree pow2div;
4006 gcc_assert (step && TREE_CODE (step) == INTEGER_CST);
4008 /* The period of the iv is type_range / gcd (step, type_range) - 1.
4009 Since the type range is a power of two, it suffices to determine the
4010 maximum power of two that divides step. */
4011 pow2div = num_ending_zeros (step);
4012 type = unsigned_type_for (TREE_TYPE (step));
4014 period = build_low_bits_mask (type,
4015 (TYPE_PRECISION (type)
4016 - tree_low_cst (pow2div, 1)));
4018 return period;
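/* E.g. (sketch) for an 8-bit unsigned iv with step == 4, pow2div == 2
   and the period is the low-6-bit mask 63: the iv takes 64 distinct
   values before wrapping back to its initial one.  */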
4021 /* Returns the comparison operator used when eliminating the iv USE. */
4023 static enum tree_code
4024 iv_elimination_compare (struct ivopts_data *data, struct iv_use *use)
4026 struct loop *loop = data->current_loop;
4027 basic_block ex_bb;
4028 edge exit;
4030 ex_bb = gimple_bb (use->stmt);
4031 exit = EDGE_SUCC (ex_bb, 0);
4032 if (flow_bb_inside_loop_p (loop, exit->dest))
4033 exit = EDGE_SUCC (ex_bb, 1);
4035 return (exit->flags & EDGE_TRUE_VALUE ? EQ_EXPR : NE_EXPR);
4038 /* Check whether it is possible to express the condition in USE by comparison
4039 of candidate CAND. If so, store the value compared with to BOUND. */
4041 static bool
4042 may_eliminate_iv (struct ivopts_data *data,
4043 struct iv_use *use, struct iv_cand *cand, tree *bound)
4045 basic_block ex_bb;
4046 edge exit;
4047 tree nit, period;
4048 struct loop *loop = data->current_loop;
4049 aff_tree bnd;
4051 if (TREE_CODE (cand->iv->step) != INTEGER_CST)
4052 return false;
4054 /* For now this works only for exits that dominate the loop latch.
4055 TODO: extend to other conditions inside the loop body. */
4056 ex_bb = gimple_bb (use->stmt);
4057 if (use->stmt != last_stmt (ex_bb)
4058 || gimple_code (use->stmt) != GIMPLE_COND
4059 || !dominated_by_p (CDI_DOMINATORS, loop->latch, ex_bb))
4060 return false;
4062 exit = EDGE_SUCC (ex_bb, 0);
4063 if (flow_bb_inside_loop_p (loop, exit->dest))
4064 exit = EDGE_SUCC (ex_bb, 1);
4065 if (flow_bb_inside_loop_p (loop, exit->dest))
4066 return false;
4068 nit = niter_for_exit (data, exit);
4069 if (!nit)
4070 return false;
4072 /* Determine whether we can use the variable to test the exit condition.
4073 This is the case iff the period of the induction variable is greater
4074 than the number of iterations for which the exit condition is true. */
4075 period = iv_period (cand->iv);
4077 /* If the number of iterations is constant, compare against it directly. */
4078 if (TREE_CODE (nit) == INTEGER_CST)
4080 if (!tree_int_cst_lt (nit, period))
4081 return false;
4084 /* If not, and if this is the only possible exit of the loop, see whether
4085 we can get a conservative estimate on the number of iterations of the
4086 entire loop and compare against that instead. */
4087 else if (loop_only_exit_p (loop, exit))
4089 double_int period_value, max_niter;
4090 if (!estimated_loop_iterations (loop, true, &max_niter))
4091 return false;
4092 period_value = tree_to_double_int (period);
4093 if (double_int_ucmp (max_niter, period_value) >= 0)
4094 return false;
4097 /* Otherwise, punt. */
4098 else
4099 return false;
4101 cand_value_at (loop, cand, use->stmt, nit, &bnd);
4103 *bound = aff_combination_to_tree (&bnd);
4104 /* It is unlikely that computing the number of iterations using division
4105 would be more profitable than keeping the original induction variable. */
4106 if (expression_expensive_p (*bound))
4107 return false;
4108 return true;
4111 /* Determines cost of basing replacement of USE on CAND in a condition. */
4113 static bool
4114 determine_use_iv_cost_condition (struct ivopts_data *data,
4115 struct iv_use *use, struct iv_cand *cand)
4117 tree bound = NULL_TREE;
4118 struct iv *cmp_iv;
4119 bitmap depends_on_elim = NULL, depends_on_express = NULL, depends_on;
4120 comp_cost elim_cost, express_cost, cost;
4121 bool ok;
4122 tree *control_var, *bound_cst;
4124 /* Only consider real candidates. */
4125 if (!cand->iv)
4127 set_use_iv_cost (data, use, cand, infinite_cost, NULL, NULL_TREE);
4128 return false;
4131 /* Try iv elimination. */
4132 if (may_eliminate_iv (data, use, cand, &bound))
4134 elim_cost = force_var_cost (data, bound, &depends_on_elim);
4135 /* The bound is a loop invariant, so it will be only computed
4136 once. */
4137 elim_cost.cost = adjust_setup_cost (data, elim_cost.cost);
4139 else
4140 elim_cost = infinite_cost;
4142 /* Try expressing the original giv. If it is compared with an invariant,
4143 note that we cannot get rid of it. */
4144 ok = extract_cond_operands (data, use->stmt, &control_var, &bound_cst,
4145 NULL, &cmp_iv);
4146 gcc_assert (ok);
4148 /* When the condition is a comparison of the candidate IV against
4149 zero, prefer this IV.
4151 TODO: The constant that we're subtracting from the cost should
4152 be target-dependent. This information should be added to the
4153 target costs for each backend. */
4154 if (!infinite_cost_p (elim_cost) /* Do not try to decrease infinite! */
4155 && integer_zerop (*bound_cst)
4156 && (operand_equal_p (*control_var, cand->var_after, 0)
4157 || operand_equal_p (*control_var, cand->var_before, 0)))
4158 elim_cost.cost -= 1;
4160 express_cost = get_computation_cost (data, use, cand, false,
4161 &depends_on_express, NULL);
4162 fd_ivopts_data = data;
4163 walk_tree (&cmp_iv->base, find_depends, &depends_on_express, NULL);
4165 /* Choose the better approach, preferring the eliminated IV. */
4166 if (compare_costs (elim_cost, express_cost) <= 0)
4168 cost = elim_cost;
4169 depends_on = depends_on_elim;
4170 depends_on_elim = NULL;
4172 else
4174 cost = express_cost;
4175 depends_on = depends_on_express;
4176 depends_on_express = NULL;
4177 bound = NULL_TREE;
4180 set_use_iv_cost (data, use, cand, cost, depends_on, bound);
4182 if (depends_on_elim)
4183 BITMAP_FREE (depends_on_elim);
4184 if (depends_on_express)
4185 BITMAP_FREE (depends_on_express);
4187 return !infinite_cost_p (cost);
4190 /* Determines cost of basing replacement of USE on CAND. Returns false
4191 if USE cannot be based on CAND. */
4193 static bool
4194 determine_use_iv_cost (struct ivopts_data *data,
4195 struct iv_use *use, struct iv_cand *cand)
4197 switch (use->type)
4199 case USE_NONLINEAR_EXPR:
4200 return determine_use_iv_cost_generic (data, use, cand);
4202 case USE_ADDRESS:
4203 return determine_use_iv_cost_address (data, use, cand);
4205 case USE_COMPARE:
4206 return determine_use_iv_cost_condition (data, use, cand);
4208 default:
4209 gcc_unreachable ();
4213 /* Return true if get_computation_cost indicates that autoincrement is
4214 a possibility for the pair of USE and CAND, false otherwise. */
4216 static bool
4217 autoinc_possible_for_pair (struct ivopts_data *data, struct iv_use *use,
4218 struct iv_cand *cand)
4220 bitmap depends_on;
4221 bool can_autoinc;
4222 comp_cost cost;
4224 if (use->type != USE_ADDRESS)
4225 return false;
4227 cost = get_computation_cost (data, use, cand, true, &depends_on,
4228 &can_autoinc);
4230 BITMAP_FREE (depends_on);
4232 return !infinite_cost_p (cost) && can_autoinc;
4235 /* Examine IP_ORIGINAL candidates to see if they are incremented next to a
4236 use that allows autoincrement, and set their AINC_USE if possible. */
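/* For example (an illustrative sketch): on a target with post-increment
   addressing, an IP_ORIGINAL candidate incremented right after an address
   use, as in

     ... = *p_4;        <-- closest preceding address use
     p_5 = p_4 + 1;     <-- cand->incremented_at

   allows the increment to be folded into the memory access, so the use is
   recorded as the candidate's AINC_USE. */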
4238 static void
4239 set_autoinc_for_original_candidates (struct ivopts_data *data)
4241 unsigned i, j;
4243 for (i = 0; i < n_iv_cands (data); i++)
4245 struct iv_cand *cand = iv_cand (data, i);
4246 struct iv_use *closest = NULL;
4247 if (cand->pos != IP_ORIGINAL)
4248 continue;
4249 for (j = 0; j < n_iv_uses (data); j++)
4251 struct iv_use *use = iv_use (data, j);
4252 unsigned uid = gimple_uid (use->stmt);
4253 if (gimple_bb (use->stmt) != gimple_bb (cand->incremented_at)
4254 || uid > gimple_uid (cand->incremented_at))
4255 continue;
4256 if (closest == NULL || uid > gimple_uid (closest->stmt))
4257 closest = use;
4259 if (closest == NULL || !autoinc_possible_for_pair (data, closest, cand))
4260 continue;
4261 cand->ainc_use = closest;
4265 /* Finds the candidates for the induction variables. */
4267 static void
4268 find_iv_candidates (struct ivopts_data *data)
4270 /* Add commonly used ivs. */
4271 add_standard_iv_candidates (data);
4273 /* Add old induction variables. */
4274 add_old_ivs_candidates (data);
4276 /* Add induction variables derived from uses. */
4277 add_derived_ivs_candidates (data);
4279 set_autoinc_for_original_candidates (data);
4281 /* Record the important candidates. */
4282 record_important_candidates (data);
4285 /* Determines costs of basing the use of the iv on an iv candidate. */
4287 static void
4288 determine_use_iv_costs (struct ivopts_data *data)
4290 unsigned i, j;
4291 struct iv_use *use;
4292 struct iv_cand *cand;
4293 bitmap to_clear = BITMAP_ALLOC (NULL);
4295 alloc_use_cost_map (data);
4297 for (i = 0; i < n_iv_uses (data); i++)
4299 use = iv_use (data, i);
4301 if (data->consider_all_candidates)
4303 for (j = 0; j < n_iv_cands (data); j++)
4305 cand = iv_cand (data, j);
4306 determine_use_iv_cost (data, use, cand);
4309 else
4311 bitmap_iterator bi;
4313 EXECUTE_IF_SET_IN_BITMAP (use->related_cands, 0, j, bi)
4315 cand = iv_cand (data, j);
4316 if (!determine_use_iv_cost (data, use, cand))
4317 bitmap_set_bit (to_clear, j);
4320 /* Remove the candidates for which the cost is infinite from
4321 the list of related candidates. */
4322 bitmap_and_compl_into (use->related_cands, to_clear);
4323 bitmap_clear (to_clear);
4327 BITMAP_FREE (to_clear);
4329 if (dump_file && (dump_flags & TDF_DETAILS))
4331 fprintf (dump_file, "Use-candidate costs:\n");
4333 for (i = 0; i < n_iv_uses (data); i++)
4335 use = iv_use (data, i);
4337 fprintf (dump_file, "Use %d:\n", i);
4338 fprintf (dump_file, " cand\tcost\tcompl.\tdepends on\n");
4339 for (j = 0; j < use->n_map_members; j++)
4341 if (!use->cost_map[j].cand
4342 || infinite_cost_p (use->cost_map[j].cost))
4343 continue;
4345 fprintf (dump_file, " %d\t%d\t%d\t",
4346 use->cost_map[j].cand->id,
4347 use->cost_map[j].cost.cost,
4348 use->cost_map[j].cost.complexity);
4349 if (use->cost_map[j].depends_on)
4350 bitmap_print (dump_file,
4351 use->cost_map[j].depends_on, "", "");
4352 fprintf (dump_file, "\n");
4355 fprintf (dump_file, "\n");
4357 fprintf (dump_file, "\n");
4361 /* Determines cost of the candidate CAND. */
4363 static void
4364 determine_iv_cost (struct ivopts_data *data, struct iv_cand *cand)
4366 comp_cost cost_base;
4367 unsigned cost, cost_step;
4368 tree base;
4370 if (!cand->iv)
4372 cand->cost = 0;
4373 return;
4376 /* There are two costs associated with the candidate -- its increment
4377 and its initialization. The latter is almost negligible for any loop
4378 that rolls enough, so we give it only a small weight. */
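/* E.g. (illustrative): for a candidate with base B + 4 and step S,
   computing B + 4 is a one-time setup cost, scaled down below by
   adjust_setup_cost for loops that roll enough, whereas the addition
   of S is paid on every iteration. */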
4380 base = cand->iv->base;
4381 cost_base = force_var_cost (data, base, NULL);
4382 cost_step = add_cost (TYPE_MODE (TREE_TYPE (base)), data->speed);
4384 cost = cost_step + adjust_setup_cost (data, cost_base.cost);
4386 /* Prefer the original ivs unless we may gain something by replacing them,
4387 since keeping them makes debugging simpler. This preference is not
4388 relevant for artificial ivs created by other optimization passes. */
4389 if (cand->pos != IP_ORIGINAL
4390 || DECL_ARTIFICIAL (SSA_NAME_VAR (cand->var_before)))
4391 cost++;
4393 /* Prefer not to insert statements into the latch unless there are some
4394 already (so that we do not create unnecessary jumps). */
4395 if (cand->pos == IP_END
4396 && empty_block_p (ip_end_pos (data->current_loop)))
4397 cost++;
4399 cand->cost = cost;
4400 cand->cost_step = cost_step;
4403 /* Determines costs of computation of the candidates. */
4405 static void
4406 determine_iv_costs (struct ivopts_data *data)
4408 unsigned i;
4410 if (dump_file && (dump_flags & TDF_DETAILS))
4412 fprintf (dump_file, "Candidate costs:\n");
4413 fprintf (dump_file, " cand\tcost\n");
4416 for (i = 0; i < n_iv_cands (data); i++)
4418 struct iv_cand *cand = iv_cand (data, i);
4420 determine_iv_cost (data, cand);
4422 if (dump_file && (dump_flags & TDF_DETAILS))
4423 fprintf (dump_file, " %d\t%d\n", i, cand->cost);
4426 if (dump_file && (dump_flags & TDF_DETAILS))
4427 fprintf (dump_file, "\n");
4430 /* Calculates cost for having SIZE induction variables. */
4432 static unsigned
4433 ivopts_global_cost_for_size (struct ivopts_data *data, unsigned size)
4435 /* We add size to the cost, so that we prefer eliminating ivs
4436 if possible. */
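/* Illustrative sketch: with a small SIZE the result is dominated by the
   SIZE term itself, but once SIZE together with DATA->regs_used
   approaches the available registers, estimate_reg_pressure_cost starts
   charging for spills, so large sets quickly become unattractive. */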
4437 return size + estimate_reg_pressure_cost (size, data->regs_used, data->speed,
4438 data->body_includes_call);
4441 /* For each size of the induction variable set determine the penalty. */
4443 static void
4444 determine_set_costs (struct ivopts_data *data)
4446 unsigned j, n;
4447 gimple phi;
4448 gimple_stmt_iterator psi;
4449 tree op;
4450 struct loop *loop = data->current_loop;
4451 bitmap_iterator bi;
4453 if (dump_file && (dump_flags & TDF_DETAILS))
4455 fprintf (dump_file, "Global costs:\n");
4456 fprintf (dump_file, " target_avail_regs %d\n", target_avail_regs);
4457 fprintf (dump_file, " target_clobbered_regs %d\n", target_clobbered_regs);
4458 fprintf (dump_file, " target_reg_cost %d\n", target_reg_cost[data->speed]);
4459 fprintf (dump_file, " target_spill_cost %d\n", target_spill_cost[data->speed]);
4462 n = 0;
4463 for (psi = gsi_start_phis (loop->header); !gsi_end_p (psi); gsi_next (&psi))
4465 phi = gsi_stmt (psi);
4466 op = PHI_RESULT (phi);
4468 if (!is_gimple_reg (op))
4469 continue;
4471 if (get_iv (data, op))
4472 continue;
4474 n++;
4477 EXECUTE_IF_SET_IN_BITMAP (data->relevant, 0, j, bi)
4479 struct version_info *info = ver_info (data, j);
4481 if (info->inv_id && info->has_nonlin_use)
4482 n++;
4485 data->regs_used = n;
4486 if (dump_file && (dump_flags & TDF_DETAILS))
4487 fprintf (dump_file, " regs_used %d\n", n);
4489 if (dump_file && (dump_flags & TDF_DETAILS))
4491 fprintf (dump_file, " cost for size:\n");
4492 fprintf (dump_file, " ivs\tcost\n");
4493 for (j = 0; j <= 2 * target_avail_regs; j++)
4494 fprintf (dump_file, " %d\t%d\n", j,
4495 ivopts_global_cost_for_size (data, j));
4496 fprintf (dump_file, "\n");
4500 /* Returns true if A is a cheaper cost pair than B. */
4502 static bool
4503 cheaper_cost_pair (struct cost_pair *a, struct cost_pair *b)
4505 int cmp;
4507 if (!a)
4508 return false;
4510 if (!b)
4511 return true;
4513 cmp = compare_costs (a->cost, b->cost);
4514 if (cmp < 0)
4515 return true;
4517 if (cmp > 0)
4518 return false;
4520 /* In case the costs are the same, prefer the cheaper candidate. */
4521 if (a->cand->cost < b->cand->cost)
4522 return true;
4524 return false;
4527 /* Computes the cost field of the IVS structure. */
4529 static void
4530 iv_ca_recount_cost (struct ivopts_data *data, struct iv_ca *ivs)
4532 comp_cost cost = ivs->cand_use_cost;
4533 cost.cost += ivs->cand_cost;
4534 cost.cost += ivopts_global_cost_for_size (data, ivs->n_regs);
4536 ivs->cost = cost;
4539 /* Remove the invariants in set INVS from set IVS. */
4541 static void
4542 iv_ca_set_remove_invariants (struct iv_ca *ivs, bitmap invs)
4544 bitmap_iterator bi;
4545 unsigned iid;
4547 if (!invs)
4548 return;
4550 EXECUTE_IF_SET_IN_BITMAP (invs, 0, iid, bi)
4552 ivs->n_invariant_uses[iid]--;
4553 if (ivs->n_invariant_uses[iid] == 0)
4554 ivs->n_regs--;
4558 /* Set USE not to be expressed by any candidate in IVS. */
4560 static void
4561 iv_ca_set_no_cp (struct ivopts_data *data, struct iv_ca *ivs,
4562 struct iv_use *use)
4564 unsigned uid = use->id, cid;
4565 struct cost_pair *cp;
4567 cp = ivs->cand_for_use[uid];
4568 if (!cp)
4569 return;
4570 cid = cp->cand->id;
4572 ivs->bad_uses++;
4573 ivs->cand_for_use[uid] = NULL;
4574 ivs->n_cand_uses[cid]--;
4576 if (ivs->n_cand_uses[cid] == 0)
4578 bitmap_clear_bit (ivs->cands, cid);
4579 /* Do not count the pseudocandidates. */
4580 if (cp->cand->iv)
4581 ivs->n_regs--;
4582 ivs->n_cands--;
4583 ivs->cand_cost -= cp->cand->cost;
4585 iv_ca_set_remove_invariants (ivs, cp->cand->depends_on);
4588 ivs->cand_use_cost = sub_costs (ivs->cand_use_cost, cp->cost);
4590 iv_ca_set_remove_invariants (ivs, cp->depends_on);
4591 iv_ca_recount_cost (data, ivs);
4594 /* Add invariants in set INVS to set IVS. */
4596 static void
4597 iv_ca_set_add_invariants (struct iv_ca *ivs, bitmap invs)
4599 bitmap_iterator bi;
4600 unsigned iid;
4602 if (!invs)
4603 return;
4605 EXECUTE_IF_SET_IN_BITMAP (invs, 0, iid, bi)
4607 ivs->n_invariant_uses[iid]++;
4608 if (ivs->n_invariant_uses[iid] == 1)
4609 ivs->n_regs++;
4613 /* Set cost pair for USE in set IVS to CP. */
4615 static void
4616 iv_ca_set_cp (struct ivopts_data *data, struct iv_ca *ivs,
4617 struct iv_use *use, struct cost_pair *cp)
4619 unsigned uid = use->id, cid;
4621 if (ivs->cand_for_use[uid] == cp)
4622 return;
4624 if (ivs->cand_for_use[uid])
4625 iv_ca_set_no_cp (data, ivs, use);
4627 if (cp)
4629 cid = cp->cand->id;
4631 ivs->bad_uses--;
4632 ivs->cand_for_use[uid] = cp;
4633 ivs->n_cand_uses[cid]++;
4634 if (ivs->n_cand_uses[cid] == 1)
4636 bitmap_set_bit (ivs->cands, cid);
4637 /* Do not count the pseudocandidates. */
4638 if (cp->cand->iv)
4639 ivs->n_regs++;
4640 ivs->n_cands++;
4641 ivs->cand_cost += cp->cand->cost;
4643 iv_ca_set_add_invariants (ivs, cp->cand->depends_on);
4646 ivs->cand_use_cost = add_costs (ivs->cand_use_cost, cp->cost);
4647 iv_ca_set_add_invariants (ivs, cp->depends_on);
4648 iv_ca_recount_cost (data, ivs);
4652 /* Extend set IVS by expressing USE by some of the candidates in it
4653 if possible. */
4655 static void
4656 iv_ca_add_use (struct ivopts_data *data, struct iv_ca *ivs,
4657 struct iv_use *use)
4659 struct cost_pair *best_cp = NULL, *cp;
4660 bitmap_iterator bi;
4661 unsigned i;
4663 gcc_assert (ivs->upto >= use->id);
4665 if (ivs->upto == use->id)
4667 ivs->upto++;
4668 ivs->bad_uses++;
4671 EXECUTE_IF_SET_IN_BITMAP (ivs->cands, 0, i, bi)
4673 cp = get_use_iv_cost (data, use, iv_cand (data, i));
4675 if (cheaper_cost_pair (cp, best_cp))
4676 best_cp = cp;
4679 iv_ca_set_cp (data, ivs, use, best_cp);
4682 /* Get cost for assignment IVS. */
4684 static comp_cost
4685 iv_ca_cost (struct iv_ca *ivs)
4687 /* This was a conditional expression but it triggered a bug in
4688 Sun C 5.5. */
4689 if (ivs->bad_uses)
4690 return infinite_cost;
4691 else
4692 return ivs->cost;
4695 /* Returns true if all dependences of CP are among invariants in IVS. */
4697 static bool
4698 iv_ca_has_deps (struct iv_ca *ivs, struct cost_pair *cp)
4700 unsigned i;
4701 bitmap_iterator bi;
4703 if (!cp->depends_on)
4704 return true;
4706 EXECUTE_IF_SET_IN_BITMAP (cp->depends_on, 0, i, bi)
4708 if (ivs->n_invariant_uses[i] == 0)
4709 return false;
4712 return true;
4715 /* Creates a change that expresses USE by NEW_CP instead of OLD_CP, and
4716 chains it before NEXT_CHANGE. */
4718 static struct iv_ca_delta *
4719 iv_ca_delta_add (struct iv_use *use, struct cost_pair *old_cp,
4720 struct cost_pair *new_cp, struct iv_ca_delta *next_change)
4722 struct iv_ca_delta *change = XNEW (struct iv_ca_delta);
4724 change->use = use;
4725 change->old_cp = old_cp;
4726 change->new_cp = new_cp;
4727 change->next_change = next_change;
4729 return change;
4732 /* Joins two lists of changes L1 and L2. Destructive -- old lists
4733 are rewritten. */
4735 static struct iv_ca_delta *
4736 iv_ca_delta_join (struct iv_ca_delta *l1, struct iv_ca_delta *l2)
4738 struct iv_ca_delta *last;
4740 if (!l2)
4741 return l1;
4743 if (!l1)
4744 return l2;
4746 for (last = l1; last->next_change; last = last->next_change)
4747 continue;
4748 last->next_change = l2;
4750 return l1;
4753 /* Returns the cost pair by which USE is expressed in IVS. */
4755 static struct cost_pair *
4756 iv_ca_cand_for_use (struct iv_ca *ivs, struct iv_use *use)
4758 return ivs->cand_for_use[use->id];
4761 /* Reverse the list of changes DELTA, forming its inverse. */
4763 static struct iv_ca_delta *
4764 iv_ca_delta_reverse (struct iv_ca_delta *delta)
4766 struct iv_ca_delta *act, *next, *prev = NULL;
4767 struct cost_pair *tmp;
4769 for (act = delta; act; act = next)
4771 next = act->next_change;
4772 act->next_change = prev;
4773 prev = act;
4775 tmp = act->old_cp;
4776 act->old_cp = act->new_cp;
4777 act->new_cp = tmp;
4780 return prev;
4783 /* Commit changes in DELTA to IVS. If FORWARD is false, the changes are
4784 reverted instead. */
4786 static void
4787 iv_ca_delta_commit (struct ivopts_data *data, struct iv_ca *ivs,
4788 struct iv_ca_delta *delta, bool forward)
4790 struct cost_pair *from, *to;
4791 struct iv_ca_delta *act;
4793 if (!forward)
4794 delta = iv_ca_delta_reverse (delta);
4796 for (act = delta; act; act = act->next_change)
4798 from = act->old_cp;
4799 to = act->new_cp;
4800 gcc_assert (iv_ca_cand_for_use (ivs, act->use) == from);
4801 iv_ca_set_cp (data, ivs, act->use, to);
4804 if (!forward)
4805 iv_ca_delta_reverse (delta);
4808 /* Returns true if CAND is used in IVS. */
4810 static bool
4811 iv_ca_cand_used_p (struct iv_ca *ivs, struct iv_cand *cand)
4813 return ivs->n_cand_uses[cand->id] > 0;
4816 /* Returns number of induction variable candidates in the set IVS. */
4818 static unsigned
4819 iv_ca_n_cands (struct iv_ca *ivs)
4821 return ivs->n_cands;
4824 /* Free the list of changes DELTA. */
4826 static void
4827 iv_ca_delta_free (struct iv_ca_delta **delta)
4829 struct iv_ca_delta *act, *next;
4831 for (act = *delta; act; act = next)
4833 next = act->next_change;
4834 free (act);
4837 *delta = NULL;
4840 /* Allocates a new iv candidate assignment. */
4842 static struct iv_ca *
4843 iv_ca_new (struct ivopts_data *data)
4845 struct iv_ca *nw = XNEW (struct iv_ca);
4847 nw->upto = 0;
4848 nw->bad_uses = 0;
4849 nw->cand_for_use = XCNEWVEC (struct cost_pair *, n_iv_uses (data));
4850 nw->n_cand_uses = XCNEWVEC (unsigned, n_iv_cands (data));
4851 nw->cands = BITMAP_ALLOC (NULL);
4852 nw->n_cands = 0;
4853 nw->n_regs = 0;
4854 nw->cand_use_cost = zero_cost;
4855 nw->cand_cost = 0;
4856 nw->n_invariant_uses = XCNEWVEC (unsigned, data->max_inv_id + 1);
4857 nw->cost = zero_cost;
4859 return nw;
4862 /* Free memory occupied by the set IVS. */
4864 static void
4865 iv_ca_free (struct iv_ca **ivs)
4867 free ((*ivs)->cand_for_use);
4868 free ((*ivs)->n_cand_uses);
4869 BITMAP_FREE ((*ivs)->cands);
4870 free ((*ivs)->n_invariant_uses);
4871 free (*ivs);
4872 *ivs = NULL;
4875 /* Dumps IVS to FILE. */
4877 static void
4878 iv_ca_dump (struct ivopts_data *data, FILE *file, struct iv_ca *ivs)
4880 const char *pref = " invariants ";
4881 unsigned i;
4882 comp_cost cost = iv_ca_cost (ivs);
4884 fprintf (file, " cost %d (complexity %d)\n", cost.cost, cost.complexity);
4885 bitmap_print (file, ivs->cands, " candidates ","\n");
4887 for (i = 1; i <= data->max_inv_id; i++)
4888 if (ivs->n_invariant_uses[i])
4890 fprintf (file, "%s%d", pref, i);
4891 pref = ", ";
4893 fprintf (file, "\n");
4896 /* Try changing the candidate in IVS to CAND for each use. Return the cost
4897 of the new set, and store the differences in DELTA. The number of
4898 induction variables in the new set is stored in N_IVS. */
4900 static comp_cost
4901 iv_ca_extend (struct ivopts_data *data, struct iv_ca *ivs,
4902 struct iv_cand *cand, struct iv_ca_delta **delta,
4903 unsigned *n_ivs)
4905 unsigned i;
4906 comp_cost cost;
4907 struct iv_use *use;
4908 struct cost_pair *old_cp, *new_cp;
4910 *delta = NULL;
4911 for (i = 0; i < ivs->upto; i++)
4913 use = iv_use (data, i);
4914 old_cp = iv_ca_cand_for_use (ivs, use);
4916 if (old_cp
4917 && old_cp->cand == cand)
4918 continue;
4920 new_cp = get_use_iv_cost (data, use, cand);
4921 if (!new_cp)
4922 continue;
4924 if (!iv_ca_has_deps (ivs, new_cp))
4925 continue;
4927 if (!cheaper_cost_pair (new_cp, old_cp))
4928 continue;
4930 *delta = iv_ca_delta_add (use, old_cp, new_cp, *delta);
4933 iv_ca_delta_commit (data, ivs, *delta, true);
4934 cost = iv_ca_cost (ivs);
4935 if (n_ivs)
4936 *n_ivs = iv_ca_n_cands (ivs);
4937 iv_ca_delta_commit (data, ivs, *delta, false);
4939 return cost;
4942 /* Try narrowing set IVS by removing CAND. Return the cost of
4943 the new set and store the differences in DELTA. */
4945 static comp_cost
4946 iv_ca_narrow (struct ivopts_data *data, struct iv_ca *ivs,
4947 struct iv_cand *cand, struct iv_ca_delta **delta)
4949 unsigned i, ci;
4950 struct iv_use *use;
4951 struct cost_pair *old_cp, *new_cp, *cp;
4952 bitmap_iterator bi;
4953 struct iv_cand *cnd;
4954 comp_cost cost;
4956 *delta = NULL;
4957 for (i = 0; i < n_iv_uses (data); i++)
4959 use = iv_use (data, i);
4961 old_cp = iv_ca_cand_for_use (ivs, use);
4962 if (old_cp->cand != cand)
4963 continue;
4965 new_cp = NULL;
4967 if (data->consider_all_candidates)
4969 EXECUTE_IF_SET_IN_BITMAP (ivs->cands, 0, ci, bi)
4971 if (ci == cand->id)
4972 continue;
4974 cnd = iv_cand (data, ci);
4976 cp = get_use_iv_cost (data, use, cnd);
4977 if (!cp)
4978 continue;
4979 if (!iv_ca_has_deps (ivs, cp))
4980 continue;
4982 if (!cheaper_cost_pair (cp, new_cp))
4983 continue;
4985 new_cp = cp;
4988 else
4990 EXECUTE_IF_AND_IN_BITMAP (use->related_cands, ivs->cands, 0, ci, bi)
4992 if (ci == cand->id)
4993 continue;
4995 cnd = iv_cand (data, ci);
4997 cp = get_use_iv_cost (data, use, cnd);
4998 if (!cp)
4999 continue;
5000 if (!iv_ca_has_deps (ivs, cp))
5001 continue;
5003 if (!cheaper_cost_pair (cp, new_cp))
5004 continue;
5006 new_cp = cp;
5010 if (!new_cp)
5012 iv_ca_delta_free (delta);
5013 return infinite_cost;
5016 *delta = iv_ca_delta_add (use, old_cp, new_cp, *delta);
5019 iv_ca_delta_commit (data, ivs, *delta, true);
5020 cost = iv_ca_cost (ivs);
5021 iv_ca_delta_commit (data, ivs, *delta, false);
5023 return cost;
5026 /* Try optimizing the set of candidates IVS by removing candidates other
5027 than EXCEPT_CAND from it. Return the cost of the new set, and store
5028 the differences in DELTA. */
5030 static comp_cost
5031 iv_ca_prune (struct ivopts_data *data, struct iv_ca *ivs,
5032 struct iv_cand *except_cand, struct iv_ca_delta **delta)
5034 bitmap_iterator bi;
5035 struct iv_ca_delta *act_delta, *best_delta;
5036 unsigned i;
5037 comp_cost best_cost, acost;
5038 struct iv_cand *cand;
5040 best_delta = NULL;
5041 best_cost = iv_ca_cost (ivs);
5043 EXECUTE_IF_SET_IN_BITMAP (ivs->cands, 0, i, bi)
5045 cand = iv_cand (data, i);
5047 if (cand == except_cand)
5048 continue;
5050 acost = iv_ca_narrow (data, ivs, cand, &act_delta);
5052 if (compare_costs (acost, best_cost) < 0)
5054 best_cost = acost;
5055 iv_ca_delta_free (&best_delta);
5056 best_delta = act_delta;
5058 else
5059 iv_ca_delta_free (&act_delta);
5062 if (!best_delta)
5064 *delta = NULL;
5065 return best_cost;
5068 /* Recurse to possibly remove other unnecessary ivs. */
5069 iv_ca_delta_commit (data, ivs, best_delta, true);
5070 best_cost = iv_ca_prune (data, ivs, except_cand, delta);
5071 iv_ca_delta_commit (data, ivs, best_delta, false);
5072 *delta = iv_ca_delta_join (best_delta, *delta);
5073 return best_cost;
5076 /* Tries to extend the set IVS in the best possible way in order
5077 to express USE. If ORIGINALP is true, prefer candidates from
5078 the original set of IVs, otherwise favor important candidates not
5079 based on any memory object. */
5081 static bool
5082 try_add_cand_for (struct ivopts_data *data, struct iv_ca *ivs,
5083 struct iv_use *use, bool originalp)
5085 comp_cost best_cost, act_cost;
5086 unsigned i;
5087 bitmap_iterator bi;
5088 struct iv_cand *cand;
5089 struct iv_ca_delta *best_delta = NULL, *act_delta;
5090 struct cost_pair *cp;
5092 iv_ca_add_use (data, ivs, use);
5093 best_cost = iv_ca_cost (ivs);
5095 cp = iv_ca_cand_for_use (ivs, use);
5096 if (cp)
5098 best_delta = iv_ca_delta_add (use, NULL, cp, NULL);
5099 iv_ca_set_no_cp (data, ivs, use);
5102 /* If ORIGINALP is true, try to find the original IV for the use. Otherwise
5103 first try important candidates not based on any memory object. Only if
5104 this fails do we try the use-specific ones. Rationale -- in loops with many
5105 variables the best choice is often to use just one generic biv. If we
5106 added many ivs specific to the uses here, the optimization algorithm later
5107 would be likely to get stuck in a local minimum, thus causing us to create
5108 too many ivs. Going from few ivs to more seems more likely to be
5109 successful -- starting from few ivs, replacing an expensive use by a
5110 specific iv should always be a win. */
5111 EXECUTE_IF_SET_IN_BITMAP (data->important_candidates, 0, i, bi)
5113 cand = iv_cand (data, i);
5115 if (originalp && cand->pos != IP_ORIGINAL)
5116 continue;
5118 if (!originalp && cand->iv->base_object != NULL_TREE)
5119 continue;
5121 if (iv_ca_cand_used_p (ivs, cand))
5122 continue;
5124 cp = get_use_iv_cost (data, use, cand);
5125 if (!cp)
5126 continue;
5128 iv_ca_set_cp (data, ivs, use, cp);
5129 act_cost = iv_ca_extend (data, ivs, cand, &act_delta, NULL);
5130 iv_ca_set_no_cp (data, ivs, use);
5131 act_delta = iv_ca_delta_add (use, NULL, cp, act_delta);
5133 if (compare_costs (act_cost, best_cost) < 0)
5135 best_cost = act_cost;
5137 iv_ca_delta_free (&best_delta);
5138 best_delta = act_delta;
5140 else
5141 iv_ca_delta_free (&act_delta);
5144 if (infinite_cost_p (best_cost))
5146 for (i = 0; i < use->n_map_members; i++)
5148 cp = use->cost_map + i;
5149 cand = cp->cand;
5150 if (!cand)
5151 continue;
5153 /* Already tried this. */
5154 if (cand->important)
5156 if (originalp && cand->pos == IP_ORIGINAL)
5157 continue;
5158 if (!originalp && cand->iv->base_object == NULL_TREE)
5159 continue;
5162 if (iv_ca_cand_used_p (ivs, cand))
5163 continue;
5165 act_delta = NULL;
5166 iv_ca_set_cp (data, ivs, use, cp);
5167 act_cost = iv_ca_extend (data, ivs, cand, &act_delta, NULL);
5168 iv_ca_set_no_cp (data, ivs, use);
5169 act_delta = iv_ca_delta_add (use, iv_ca_cand_for_use (ivs, use),
5170 cp, act_delta);
5172 if (compare_costs (act_cost, best_cost) < 0)
5174 best_cost = act_cost;
5176 if (best_delta)
5177 iv_ca_delta_free (&best_delta);
5178 best_delta = act_delta;
5180 else
5181 iv_ca_delta_free (&act_delta);
5185 iv_ca_delta_commit (data, ivs, best_delta, true);
5186 iv_ca_delta_free (&best_delta);
5188 return !infinite_cost_p (best_cost);
5191 /* Finds an initial assignment of candidates to uses. */
5193 static struct iv_ca *
5194 get_initial_solution (struct ivopts_data *data, bool originalp)
5196 struct iv_ca *ivs = iv_ca_new (data);
5197 unsigned i;
5199 for (i = 0; i < n_iv_uses (data); i++)
5200 if (!try_add_cand_for (data, ivs, iv_use (data, i), originalp))
5202 iv_ca_free (&ivs);
5203 return NULL;
5206 return ivs;
5209 /* Tries to improve the set of induction variables IVS. */
5211 static bool
5212 try_improve_iv_set (struct ivopts_data *data, struct iv_ca *ivs)
5214 unsigned i, n_ivs;
5215 comp_cost acost, best_cost = iv_ca_cost (ivs);
5216 struct iv_ca_delta *best_delta = NULL, *act_delta, *tmp_delta;
5217 struct iv_cand *cand;
5219 /* Try extending the set of induction variables by one. */
5220 for (i = 0; i < n_iv_cands (data); i++)
5222 cand = iv_cand (data, i);
5224 if (iv_ca_cand_used_p (ivs, cand))
5225 continue;
5227 acost = iv_ca_extend (data, ivs, cand, &act_delta, &n_ivs);
5228 if (!act_delta)
5229 continue;
5231 /* If we successfully added the candidate and the set is small enough,
5232 try optimizing it by removing other candidates. */
5233 if (n_ivs <= ALWAYS_PRUNE_CAND_SET_BOUND)
5235 iv_ca_delta_commit (data, ivs, act_delta, true);
5236 acost = iv_ca_prune (data, ivs, cand, &tmp_delta);
5237 iv_ca_delta_commit (data, ivs, act_delta, false);
5238 act_delta = iv_ca_delta_join (act_delta, tmp_delta);
5241 if (compare_costs (acost, best_cost) < 0)
5243 best_cost = acost;
5244 iv_ca_delta_free (&best_delta);
5245 best_delta = act_delta;
5247 else
5248 iv_ca_delta_free (&act_delta);
5251 if (!best_delta)
5253 /* Try removing the candidates from the set instead. */
5254 best_cost = iv_ca_prune (data, ivs, NULL, &best_delta);
5256 /* Nothing more we can do. */
5257 if (!best_delta)
5258 return false;
5261 iv_ca_delta_commit (data, ivs, best_delta, true);
5262 gcc_assert (compare_costs (best_cost, iv_ca_cost (ivs)) == 0);
5263 iv_ca_delta_free (&best_delta);
5264 return true;
5267 /* Attempts to find the optimal set of induction variables. We use a simple
5268 greedy heuristic -- we try to replace at most one candidate in the selected
5269 solution and remove the unused ivs for as long as this improves the cost. */
5271 static struct iv_ca *
5272 find_optimal_iv_set_1 (struct ivopts_data *data, bool originalp)
5274 struct iv_ca *set;
5276 /* Get the initial solution. */
5277 set = get_initial_solution (data, originalp);
5278 if (!set)
5280 if (dump_file && (dump_flags & TDF_DETAILS))
5281 fprintf (dump_file, "Unable to substitute for ivs, failed.\n");
5282 return NULL;
5285 if (dump_file && (dump_flags & TDF_DETAILS))
5287 fprintf (dump_file, "Initial set of candidates:\n");
5288 iv_ca_dump (data, dump_file, set);
5291 while (try_improve_iv_set (data, set))
5293 if (dump_file && (dump_flags & TDF_DETAILS))
5295 fprintf (dump_file, "Improved to:\n");
5296 iv_ca_dump (data, dump_file, set);
5300 return set;
5303 static struct iv_ca *
5304 find_optimal_iv_set (struct ivopts_data *data)
5306 unsigned i;
5307 struct iv_ca *set, *origset;
5308 struct iv_use *use;
5309 comp_cost cost, origcost;
5311 /* Determine the cost using a strategy that starts with the original IVs,
5312 then try again using a strategy that prefers important candidates not
5313 based on any memory object. */
5314 origset = find_optimal_iv_set_1 (data, true);
5315 set = find_optimal_iv_set_1 (data, false);
5317 if (!origset && !set)
5318 return NULL;
5320 origcost = origset ? iv_ca_cost (origset) : infinite_cost;
5321 cost = set ? iv_ca_cost (set) : infinite_cost;
5323 if (dump_file && (dump_flags & TDF_DETAILS))
5325 fprintf (dump_file, "Original cost %d (complexity %d)\n\n",
5326 origcost.cost, origcost.complexity);
5327 fprintf (dump_file, "Final cost %d (complexity %d)\n\n",
5328 cost.cost, cost.complexity);
5331 /* Choose the one with the best cost. */
5332 if (compare_costs (origcost, cost) <= 0)
5334 if (set)
5335 iv_ca_free (&set);
5336 set = origset;
5338 else if (origset)
5339 iv_ca_free (&origset);
5341 for (i = 0; i < n_iv_uses (data); i++)
5343 use = iv_use (data, i);
5344 use->selected = iv_ca_cand_for_use (set, use)->cand;
5347 return set;
5350 /* Creates a new induction variable corresponding to CAND. */
5352 static void
5353 create_new_iv (struct ivopts_data *data, struct iv_cand *cand)
5355 gimple_stmt_iterator incr_pos;
5356 tree base;
5357 bool after = false;
5359 if (!cand->iv)
5360 return;
5362 switch (cand->pos)
5364 case IP_NORMAL:
5365 incr_pos = gsi_last_bb (ip_normal_pos (data->current_loop));
5366 break;
5368 case IP_END:
5369 incr_pos = gsi_last_bb (ip_end_pos (data->current_loop));
5370 after = true;
5371 break;
5373 case IP_AFTER_USE:
5374 after = true;
5375 /* fall through */
5376 case IP_BEFORE_USE:
5377 incr_pos = gsi_for_stmt (cand->incremented_at);
5378 break;
5380 case IP_ORIGINAL:
5381 /* Mark that the iv is preserved. */
5382 name_info (data, cand->var_before)->preserve_biv = true;
5383 name_info (data, cand->var_after)->preserve_biv = true;
5385 /* Rewrite the increment so that it uses var_before directly. */
5386 find_interesting_uses_op (data, cand->var_after)->selected = cand;
5388 return;
5391 gimple_add_tmp_var (cand->var_before);
5392 add_referenced_var (cand->var_before);
5394 base = unshare_expr (cand->iv->base);
5396 create_iv (base, unshare_expr (cand->iv->step),
5397 cand->var_before, data->current_loop,
5398 &incr_pos, after, &cand->var_before, &cand->var_after);
5401 /* Creates new induction variables described in SET. */
5403 static void
5404 create_new_ivs (struct ivopts_data *data, struct iv_ca *set)
5406 unsigned i;
5407 struct iv_cand *cand;
5408 bitmap_iterator bi;
5410 EXECUTE_IF_SET_IN_BITMAP (set->cands, 0, i, bi)
5412 cand = iv_cand (data, i);
5413 create_new_iv (data, cand);
5418 /* Rewrites USE (definition of iv used in a nonlinear expression)
5419 using candidate CAND. */
5421 static void
5422 rewrite_use_nonlinear_expr (struct ivopts_data *data,
5423 struct iv_use *use, struct iv_cand *cand)
5425 tree comp;
5426 tree op, tgt;
5427 gimple ass;
5428 gimple_stmt_iterator bsi;
5430 /* An important special case -- if we are asked to express the value of
5431 the original iv by itself, just exit; there is no need to
5432 introduce a new computation (that might also need casting the
5433 variable to unsigned and back). */
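/* E.g. (illustrative): since all candidates use unsigned types to avoid
   undefined overflow, expressing an original  int i  by any other
   candidate would introduce casts like (int)(unsigned)...; expressing it
   by itself avoids that round trip. */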
5434 if (cand->pos == IP_ORIGINAL
5435 && cand->incremented_at == use->stmt)
5437 tree step, ctype, utype;
5438 enum tree_code incr_code = PLUS_EXPR, old_code;
5440 gcc_assert (is_gimple_assign (use->stmt));
5441 gcc_assert (gimple_assign_lhs (use->stmt) == cand->var_after);
5443 step = cand->iv->step;
5444 ctype = TREE_TYPE (step);
5445 utype = TREE_TYPE (cand->var_after);
5446 if (TREE_CODE (step) == NEGATE_EXPR)
5448 incr_code = MINUS_EXPR;
5449 step = TREE_OPERAND (step, 0);
5452 /* Check whether we may leave the computation unchanged.
5453 This is the case only if it does not rely on other
5454 computations in the loop -- otherwise, the computation
5455 we rely upon may be removed in remove_unused_ivs,
5456 thus leading to an ICE. */
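/* Illustrative: i_2 = i_1 + 1 may stay unchanged, since the increment
   uses only the constant step; but i_2 = i_1 + j_3, where j_3 is
   computed from another iv in the loop, must be rewritten, because
   j_3 could be removed by remove_unused_ivs. */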
5457 old_code = gimple_assign_rhs_code (use->stmt);
5458 if (old_code == PLUS_EXPR
5459 || old_code == MINUS_EXPR
5460 || old_code == POINTER_PLUS_EXPR)
5462 if (gimple_assign_rhs1 (use->stmt) == cand->var_before)
5463 op = gimple_assign_rhs2 (use->stmt);
5464 else if (old_code != MINUS_EXPR
5465 && gimple_assign_rhs2 (use->stmt) == cand->var_before)
5466 op = gimple_assign_rhs1 (use->stmt);
5467 else
5468 op = NULL_TREE;
5470 else
5471 op = NULL_TREE;
5473 if (op
5474 && (TREE_CODE (op) == INTEGER_CST
5475 || operand_equal_p (op, step, 0)))
5476 return;
5478 /* Otherwise, add the necessary computations to express
5479 the iv. */
5480 op = fold_convert (ctype, cand->var_before);
5481 comp = fold_convert (utype,
5482 build2 (incr_code, ctype, op,
5483 unshare_expr (step)));
5485 else
5487 comp = get_computation (data->current_loop, use, cand);
5488 gcc_assert (comp != NULL_TREE);
5491 switch (gimple_code (use->stmt))
5493 case GIMPLE_PHI:
5494 tgt = PHI_RESULT (use->stmt);
5496 /* If we should keep the biv, do not replace it. */
5497 if (name_info (data, tgt)->preserve_biv)
5498 return;
5500 bsi = gsi_after_labels (gimple_bb (use->stmt));
5501 break;
5503 case GIMPLE_ASSIGN:
5504 tgt = gimple_assign_lhs (use->stmt);
5505 bsi = gsi_for_stmt (use->stmt);
5506 break;
5508 default:
5509 gcc_unreachable ();
5512 if (!valid_gimple_rhs_p (comp)
5513 || (gimple_code (use->stmt) != GIMPLE_PHI
5514 /* We can't allow re-allocating the stmt, as other code might
5515 still point to it. */
5516 && (get_gimple_rhs_num_ops (TREE_CODE (comp))
5517 >= gimple_num_ops (gsi_stmt (bsi)))))
5519 comp = force_gimple_operand_gsi (&bsi, comp, true, NULL_TREE,
5520 true, GSI_SAME_STMT);
5521 if (POINTER_TYPE_P (TREE_TYPE (tgt)))
5522 duplicate_ssa_name_ptr_info (comp, SSA_NAME_PTR_INFO (tgt));
5525 if (gimple_code (use->stmt) == GIMPLE_PHI)
5527 ass = gimple_build_assign (tgt, comp);
5528 gsi_insert_before (&bsi, ass, GSI_SAME_STMT);
5530 bsi = gsi_for_stmt (use->stmt);
5531 remove_phi_node (&bsi, false);
5533 else
5535 gimple_assign_set_rhs_from_tree (&bsi, comp);
5536 use->stmt = gsi_stmt (bsi);
5540 /* Replaces the ssa name at index IDX with its basic variable. Callback
5541 for for_each_index. */
5543 static bool
5544 idx_remove_ssa_names (tree base, tree *idx,
5545 void *data ATTRIBUTE_UNUSED)
5547 tree *op;
5549 if (TREE_CODE (*idx) == SSA_NAME)
5550 *idx = SSA_NAME_VAR (*idx);
5552 if (TREE_CODE (base) == ARRAY_REF || TREE_CODE (base) == ARRAY_RANGE_REF)
5554 op = &TREE_OPERAND (base, 2);
5555 if (*op
5556 && TREE_CODE (*op) == SSA_NAME)
5557 *op = SSA_NAME_VAR (*op);
5558 op = &TREE_OPERAND (base, 3);
5559 if (*op
5560 && TREE_CODE (*op) == SSA_NAME)
5561 *op = SSA_NAME_VAR (*op);
5564 return true;
5567 /* Unshares REF and replaces ssa names inside it by their basic variables. */
5569 static tree
5570 unshare_and_remove_ssa_names (tree ref)
5572 ref = unshare_expr (ref);
5573 for_each_index (&ref, idx_remove_ssa_names, NULL);
5575 return ref;
5578 /* Copies the reference information from OLD_REF to NEW_REF. */
5580 static void
5581 copy_ref_info (tree new_ref, tree old_ref)
5583 tree new_ptr_base = NULL_TREE;
5585 if (TREE_CODE (old_ref) == TARGET_MEM_REF
5586 && TREE_CODE (new_ref) == TARGET_MEM_REF)
5587 TMR_ORIGINAL (new_ref) = TMR_ORIGINAL (old_ref);
5588 else if (TREE_CODE (new_ref) == TARGET_MEM_REF)
5589 TMR_ORIGINAL (new_ref) = unshare_and_remove_ssa_names (old_ref);
5591 TREE_SIDE_EFFECTS (new_ref) = TREE_SIDE_EFFECTS (old_ref);
5592 TREE_THIS_VOLATILE (new_ref) = TREE_THIS_VOLATILE (old_ref);
5594 if (TREE_CODE (new_ref) == TARGET_MEM_REF)
5595 new_ptr_base = TMR_BASE (new_ref);
5596 else if (TREE_CODE (new_ref) == MEM_REF)
5597 new_ptr_base = TREE_OPERAND (new_ref, 0);
5599 /* We can transfer points-to information from an old pointer
5600 or decl base to the new one. */
5601 if (new_ptr_base
5602 && TREE_CODE (new_ptr_base) == SSA_NAME
5603 && POINTER_TYPE_P (TREE_TYPE (new_ptr_base))
5604 && !SSA_NAME_PTR_INFO (new_ptr_base))
5606 tree base = get_base_address (old_ref);
5607 if (!base)
5609 else if ((INDIRECT_REF_P (base)
5610 || TREE_CODE (base) == MEM_REF)
5611 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
5612 duplicate_ssa_name_ptr_info
5613 (new_ptr_base, SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0)));
5614 else if (TREE_CODE (base) == VAR_DECL
5615 || TREE_CODE (base) == PARM_DECL
5616 || TREE_CODE (base) == RESULT_DECL)
5618 struct ptr_info_def *pi = get_ptr_info (new_ptr_base);
5619 pt_solution_set_var (&pi->pt, base);
5624 /* Rewrites USE (address that is an iv) using candidate CAND. */
5626 static void
5627 rewrite_use_address (struct ivopts_data *data,
5628 struct iv_use *use, struct iv_cand *cand)
5630 aff_tree aff;
5631 gimple_stmt_iterator bsi = gsi_for_stmt (use->stmt);
5632 tree base_hint = NULL_TREE;
5633 tree ref;
5634 bool ok;
5636 ok = get_computation_aff (data->current_loop, use, cand, use->stmt, &aff);
5637 gcc_assert (ok);
5638 unshare_aff_combination (&aff);
5640 /* To avoid undefined overflow problems, all IV candidates use unsigned
5641 integer types. The drawback is that this makes it impossible for
5642 create_mem_ref to distinguish an IV that is based on a memory object
5643 from one that represents simply an offset.
5645 To work around this problem, we pass a hint to create_mem_ref that
5646 indicates which variable (if any) in aff is an IV based on a memory
5647 object. Note that we only consider the candidate. If this is not
5648 based on an object, the base of the reference is in some subexpression
5649 of the use -- but these will use pointer types, so they are recognized
5650 by the create_mem_ref heuristics anyway. */
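/* Illustrative sketch: for a use  *p  where CAND is based on an array a
   (so cand->iv->base_object refers to a), the candidate's variable at
   USE->stmt is passed as BASE_HINT, letting create_mem_ref build a
   TARGET_MEM_REF whose base is that variable instead of treating the
   whole affine combination as a mere offset. */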
5651 if (cand->iv->base_object)
5652 base_hint = var_at_stmt (data->current_loop, cand, use->stmt);
5654 ref = create_mem_ref (&bsi, TREE_TYPE (*use->op_p),
5655 reference_alias_ptr_type (*use->op_p),
5656 &aff, base_hint, data->speed);
5657 copy_ref_info (ref, *use->op_p);
5658 *use->op_p = ref;
5661 /* Rewrites USE (a condition in which one of the arguments is an iv) using
5662 candidate CAND. */
5664 static void
5665 rewrite_use_compare (struct ivopts_data *data,
5666 struct iv_use *use, struct iv_cand *cand)
5668 tree comp, *var_p, op, bound;
5669 gimple_stmt_iterator bsi = gsi_for_stmt (use->stmt);
5670 enum tree_code compare;
5671 struct cost_pair *cp = get_use_iv_cost (data, use, cand);
5672 bool ok;
5674 bound = cp->value;
5675 if (bound)
5677 tree var = var_at_stmt (data->current_loop, cand, use->stmt);
5678 tree var_type = TREE_TYPE (var);
5679 gimple_seq stmts;
5681 compare = iv_elimination_compare (data, use);
5682 bound = unshare_expr (fold_convert (var_type, bound));
5683 op = force_gimple_operand (bound, &stmts, true, NULL_TREE);
5684 if (stmts)
5685 gsi_insert_seq_on_edge_immediate (
5686 loop_preheader_edge (data->current_loop),
5687 stmts);
5689 gimple_cond_set_lhs (use->stmt, var);
5690 gimple_cond_set_code (use->stmt, compare);
5691 gimple_cond_set_rhs (use->stmt, op);
5692 return;
5695 /* The induction variable elimination failed; just express the original
5696 giv. */
5697 comp = get_computation (data->current_loop, use, cand);
5698 gcc_assert (comp != NULL_TREE);
5700 ok = extract_cond_operands (data, use->stmt, &var_p, NULL, NULL, NULL);
5701 gcc_assert (ok);
5703 *var_p = force_gimple_operand_gsi (&bsi, comp, true, SSA_NAME_VAR (*var_p),
5704 true, GSI_SAME_STMT);
5707 /* Rewrites USE using candidate CAND. */
5709 static void
5710 rewrite_use (struct ivopts_data *data, struct iv_use *use, struct iv_cand *cand)
5712 switch (use->type)
5714 case USE_NONLINEAR_EXPR:
5715 rewrite_use_nonlinear_expr (data, use, cand);
5716 break;
5718 case USE_ADDRESS:
5719 rewrite_use_address (data, use, cand);
5720 break;
5722 case USE_COMPARE:
5723 rewrite_use_compare (data, use, cand);
5724 break;
5726 default:
5727 gcc_unreachable ();
5730 update_stmt (use->stmt);
5733 /* Rewrite the uses using the selected induction variables. */
5735 static void
5736 rewrite_uses (struct ivopts_data *data)
5738 unsigned i;
5739 struct iv_cand *cand;
5740 struct iv_use *use;
5742 for (i = 0; i < n_iv_uses (data); i++)
5744 use = iv_use (data, i);
5745 cand = use->selected;
5746 gcc_assert (cand);
5748 rewrite_use (data, use, cand);
5752 /* Removes the ivs that are not used after rewriting. */
5754 static void
5755 remove_unused_ivs (struct ivopts_data *data)
5757 unsigned j;
5758 bitmap_iterator bi;
5759 bitmap toremove = BITMAP_ALLOC (NULL);
5761 /* Figure out an order in which to release SSA DEFs so that we don't
5762 release something that we'd have to propagate into a debug stmt
5763 afterwards. */
5764 EXECUTE_IF_SET_IN_BITMAP (data->relevant, 0, j, bi)
5766 struct version_info *info;
5768 info = ver_info (data, j);
5769 if (info->iv
5770 && !integer_zerop (info->iv->step)
5771 && !info->inv_id
5772 && !info->iv->have_use_for
5773 && !info->preserve_biv)
5774 bitmap_set_bit (toremove, SSA_NAME_VERSION (info->iv->ssa_name));
5777 release_defs_bitset (toremove);
5779 BITMAP_FREE (toremove);
5782 /* Frees data allocated by the optimization of a single loop. */
5784 static void
5785 free_loop_data (struct ivopts_data *data)
5787 unsigned i, j;
5788 bitmap_iterator bi;
5789 tree obj;
5791 if (data->niters)
5793 pointer_map_destroy (data->niters);
5794 data->niters = NULL;
5797 EXECUTE_IF_SET_IN_BITMAP (data->relevant, 0, i, bi)
5799 struct version_info *info;
5801 info = ver_info (data, i);
5802 if (info->iv)
5803 free (info->iv);
5804 info->iv = NULL;
5805 info->has_nonlin_use = false;
5806 info->preserve_biv = false;
5807 info->inv_id = 0;
5809 bitmap_clear (data->relevant);
5810 bitmap_clear (data->important_candidates);
5812 for (i = 0; i < n_iv_uses (data); i++)
5814 struct iv_use *use = iv_use (data, i);
5816 free (use->iv);
5817 BITMAP_FREE (use->related_cands);
5818 for (j = 0; j < use->n_map_members; j++)
5819 if (use->cost_map[j].depends_on)
5820 BITMAP_FREE (use->cost_map[j].depends_on);
5821 free (use->cost_map);
5822 free (use);
5824 VEC_truncate (iv_use_p, data->iv_uses, 0);
5826 for (i = 0; i < n_iv_cands (data); i++)
5828 struct iv_cand *cand = iv_cand (data, i);
5830 if (cand->iv)
5831 free (cand->iv);
5832 if (cand->depends_on)
5833 BITMAP_FREE (cand->depends_on);
5834 free (cand);
5836 VEC_truncate (iv_cand_p, data->iv_candidates, 0);
5838 if (data->version_info_size < num_ssa_names)
5840 data->version_info_size = 2 * num_ssa_names;
5841 free (data->version_info);
5842 data->version_info = XCNEWVEC (struct version_info, data->version_info_size);
5845 data->max_inv_id = 0;
5847 for (i = 0; VEC_iterate (tree, decl_rtl_to_reset, i, obj); i++)
5848 SET_DECL_RTL (obj, NULL_RTX);
5850 VEC_truncate (tree, decl_rtl_to_reset, 0);
5853 /* Finalizes data structures used by the iv optimization pass. */
5856 static void
5857 tree_ssa_iv_optimize_finalize (struct ivopts_data *data)
5859 free_loop_data (data);
5860 free (data->version_info);
5861 BITMAP_FREE (data->relevant);
5862 BITMAP_FREE (data->important_candidates);
5864 VEC_free (tree, heap, decl_rtl_to_reset);
5865 VEC_free (iv_use_p, heap, data->iv_uses);
5866 VEC_free (iv_cand_p, heap, data->iv_candidates);
5871 /* Returns true if the loop body BODY includes any calls that are not inexpensive builtins. */
5871 static bool
5872 loop_body_includes_call (basic_block *body, unsigned num_nodes)
5874 gimple_stmt_iterator gsi;
5875 unsigned i;
5877 for (i = 0; i < num_nodes; i++)
5878 for (gsi = gsi_start_bb (body[i]); !gsi_end_p (gsi); gsi_next (&gsi))
5880 gimple stmt = gsi_stmt (gsi);
5881 if (is_gimple_call (stmt)
5882 && !is_inexpensive_builtin (gimple_call_fndecl (stmt)))
5883 return true;
5885 return false;
5888 /* Optimizes the LOOP. Returns true if anything changed. */
5890 static bool
5891 tree_ssa_iv_optimize_loop (struct ivopts_data *data, struct loop *loop)
5893 bool changed = false;
5894 struct iv_ca *iv_ca;
5895 edge exit;
5896 basic_block *body;
5898 gcc_assert (!data->niters);
5899 data->current_loop = loop;
5900 data->speed = optimize_loop_for_speed_p (loop);
5902 if (dump_file && (dump_flags & TDF_DETAILS))
5904 fprintf (dump_file, "Processing loop %d\n", loop->num);
5906 exit = single_dom_exit (loop);
5907 if (exit)
5909 fprintf (dump_file, " single exit %d -> %d, exit condition ",
5910 exit->src->index, exit->dest->index);
5911 print_gimple_stmt (dump_file, last_stmt (exit->src), 0, TDF_SLIM);
5912 fprintf (dump_file, "\n");
5915 fprintf (dump_file, "\n");
5918 body = get_loop_body (loop);
5919 data->body_includes_call = loop_body_includes_call (body, loop->num_nodes);
5920 renumber_gimple_stmt_uids_in_blocks (body, loop->num_nodes);
5921 free (body);
5923 /* For each ssa name, determine whether it behaves as an induction variable
5924 in some loop. */
5925 if (!find_induction_variables (data))
5926 goto finish;
5928 /* Finds interesting uses (item 1). */
5929 find_interesting_uses (data);
5930 if (n_iv_uses (data) > MAX_CONSIDERED_USES)
5931 goto finish;
5933 /* Finds candidates for the induction variables (item 2). */
5934 find_iv_candidates (data);
5936 /* Calculates the costs (item 3, part 1). */
5937 determine_iv_costs (data);
5938 determine_use_iv_costs (data);
5939 determine_set_costs (data);
5941 /* Find the optimal set of induction variables (item 3, part 2). */
5942 iv_ca = find_optimal_iv_set (data);
5943 if (!iv_ca)
5944 goto finish;
5945 changed = true;
5947 /* Create the new induction variables (item 4, part 1). */
5948 create_new_ivs (data, iv_ca);
5949 iv_ca_free (&iv_ca);
5951 /* Rewrite the uses (item 4, part 2). */
5952 rewrite_uses (data);
5954 /* Remove the ivs that are unused after rewriting. */
5955 remove_unused_ivs (data);
5957 /* We have changed the structure of induction variables; it might happen
5958 that definitions in the scev database refer to some of them that were
5959 eliminated. */
5960 scev_reset ();
5962 finish:
5963 free_loop_data (data);
5965 return changed;
5968 /* Main entry point. Optimizes induction variables in loops. */
5970 void
5971 tree_ssa_iv_optimize (void)
5973 struct loop *loop;
5974 struct ivopts_data data;
5975 loop_iterator li;
5977 tree_ssa_iv_optimize_init (&data);
5979 /* Optimize the loops starting with the innermost ones. */
5980 FOR_EACH_LOOP (li, loop, LI_FROM_INNERMOST)
5982 if (dump_file && (dump_flags & TDF_DETAILS))
5983 flow_loop_dump (loop, dump_file, NULL, 1);
5985 tree_ssa_iv_optimize_loop (&data, loop);
5988 tree_ssa_iv_optimize_finalize (&data);