/* Operations with affine combinations of trees.
   Copyright (C) 2005 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 2, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "rtl.h"
27 #include "tm_p.h"
28 #include "hard-reg-set.h"
29 #include "output.h"
30 #include "diagnostic.h"
31 #include "tree-dump.h"
32 #include "pointer-set.h"
33 #include "tree-affine.h"
34 #include "tree-gimple.h"
/* Extends CST as appropriate for the affine combination COMB.  */

double_int
double_int_ext_for_comb (double_int cst, aff_tree *comb)
{
  return double_int_sext (cst, TYPE_PRECISION (comb->type));
}
/* Initializes affine combination COMB so that its value is zero in TYPE.  */

static void
aff_combination_zero (aff_tree *comb, tree type)
{
  comb->type = type;
  comb->offset = double_int_zero;
  comb->n = 0;
  comb->rest = NULL_TREE;
}
/* Sets COMB to CST.  */

void
aff_combination_const (aff_tree *comb, tree type, double_int cst)
{
  aff_combination_zero (comb, type);
  comb->offset = double_int_ext_for_comb (cst, comb);
}
/* Sets COMB to single element ELT.  */

void
aff_combination_elt (aff_tree *comb, tree type, tree elt)
{
  aff_combination_zero (comb, type);

  comb->n = 1;
  comb->elts[0].val = elt;
  comb->elts[0].coef = double_int_one;
}
/* Scales COMB by SCALE.  */

void
aff_combination_scale (aff_tree *comb, double_int scale)
{
  unsigned i, j;

  scale = double_int_ext_for_comb (scale, comb);
  if (double_int_one_p (scale))
    return;

  if (double_int_zero_p (scale))
    {
      aff_combination_zero (comb, comb->type);
      return;
    }

  comb->offset
    = double_int_ext_for_comb (double_int_mul (scale, comb->offset), comb);
  for (i = 0, j = 0; i < comb->n; i++)
    {
      double_int new_coef;

      new_coef
        = double_int_ext_for_comb (double_int_mul (scale, comb->elts[i].coef),
                                   comb);
      /* A coefficient may become zero due to overflow.  Remove the zero
         elements.  */
      if (double_int_zero_p (new_coef))
        continue;
      comb->elts[j].coef = new_coef;
      comb->elts[j].val = comb->elts[i].val;
      j++;
    }
  comb->n = j;

  if (comb->rest)
    {
      if (comb->n < MAX_AFF_ELTS)
        {
          comb->elts[comb->n].coef = scale;
          comb->elts[comb->n].val = comb->rest;
          comb->rest = NULL_TREE;
          comb->n++;
        }
      else
        comb->rest = fold_build2 (MULT_EXPR, comb->type, comb->rest,
                                  double_int_to_tree (comb->type, scale));
    }
}
/* Adds ELT * SCALE to COMB.  */

void
aff_combination_add_elt (aff_tree *comb, tree elt, double_int scale)
{
  unsigned i;

  scale = double_int_ext_for_comb (scale, comb);
  if (double_int_zero_p (scale))
    return;

  for (i = 0; i < comb->n; i++)
    if (operand_equal_p (comb->elts[i].val, elt, 0))
      {
        double_int new_coef;

        new_coef = double_int_add (comb->elts[i].coef, scale);
        new_coef = double_int_ext_for_comb (new_coef, comb);
        if (!double_int_zero_p (new_coef))
          {
            comb->elts[i].coef = new_coef;
            return;
          }

        comb->n--;
        comb->elts[i] = comb->elts[comb->n];

        if (comb->rest)
          {
            gcc_assert (comb->n == MAX_AFF_ELTS - 1);
            comb->elts[comb->n].coef = double_int_one;
            comb->elts[comb->n].val = comb->rest;
            comb->rest = NULL_TREE;
            comb->n++;
          }
        return;
      }
  if (comb->n < MAX_AFF_ELTS)
    {
      comb->elts[comb->n].coef = scale;
      comb->elts[comb->n].val = elt;
      comb->n++;
      return;
    }

  if (double_int_one_p (scale))
    elt = fold_convert (comb->type, elt);
  else
    elt = fold_build2 (MULT_EXPR, comb->type,
                       fold_convert (comb->type, elt),
                       double_int_to_tree (comb->type, scale));

  if (comb->rest)
    comb->rest = fold_build2 (PLUS_EXPR, comb->type, comb->rest, elt);
  else
    comb->rest = elt;
}
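/* Illustrative sketch, not part of the original tree-affine.c: it only
   shows how aff_combination_add_elt merges equal values and drops
   cancelled terms.  Assumes shwi_to_double_int from double-int.h; X and
   Y are integer-typed trees.  Adding x * 3 and then x * -3 leaves the
   combination empty again (n == 0); adding y * 2 afterwards records a
   single element { y, 2 }.  */

static void ATTRIBUTE_UNUSED
example_add_elts (tree x, tree y, tree type, aff_tree *comb)
{
  aff_combination_zero (comb, type);                           /* comb = 0    */
  aff_combination_add_elt (comb, x, shwi_to_double_int (3));   /* comb = 3*x  */
  aff_combination_add_elt (comb, x, shwi_to_double_int (-3));  /* comb = 0    */
  aff_combination_add_elt (comb, y, shwi_to_double_int (2));   /* comb = 2*y  */
}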
/* Adds CST to C.  */

static void
aff_combination_add_cst (aff_tree *c, double_int cst)
{
  c->offset = double_int_ext_for_comb (double_int_add (c->offset, cst), c);
}
/* Adds COMB2 to COMB1.  */

void
aff_combination_add (aff_tree *comb1, aff_tree *comb2)
{
  unsigned i;

  aff_combination_add_cst (comb1, comb2->offset);
  for (i = 0; i < comb2->n; i++)
    aff_combination_add_elt (comb1, comb2->elts[i].val, comb2->elts[i].coef);
  if (comb2->rest)
    aff_combination_add_elt (comb1, comb2->rest, double_int_one);
}
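/* Illustrative sketch, not part of the original tree-affine.c: composing
   the basic algebra above.  Given integer-typed trees A and B, compute
   the combination for a - 2*b by decomposing both operands, scaling one
   of them and adding.  Assumes shwi_to_double_int from double-int.h.  */

static void ATTRIBUTE_UNUSED
example_a_minus_2b (tree a, tree b, tree type, aff_tree *r)
{
  aff_tree tmp;

  tree_to_aff_combination (a, type, r);                    /* r   = a        */
  tree_to_aff_combination (b, type, &tmp);                 /* tmp = b        */
  aff_combination_scale (&tmp, shwi_to_double_int (-2));   /* tmp = -2 * b   */
  aff_combination_add (r, &tmp);                           /* r   = a - 2*b  */
}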
/* Converts affine combination COMB to TYPE.  */

void
aff_combination_convert (aff_tree *comb, tree type)
{
  unsigned i, j;
  tree comb_type = comb->type;

  if (TYPE_PRECISION (type) > TYPE_PRECISION (comb_type))
    {
      tree val = fold_convert (type, aff_combination_to_tree (comb));
      tree_to_aff_combination (val, type, comb);
      return;
    }

  comb->type = type;
  if (comb->rest)
    comb->rest = fold_convert (type, comb->rest);

  if (TYPE_PRECISION (type) == TYPE_PRECISION (comb_type))
    return;

  comb->offset = double_int_ext_for_comb (comb->offset, comb);
  for (i = j = 0; i < comb->n; i++)
    {
      double_int new_coef = double_int_ext_for_comb (comb->elts[i].coef, comb);
      if (double_int_zero_p (new_coef))
        continue;
      comb->elts[j].coef = new_coef;
      comb->elts[j].val = fold_convert (type, comb->elts[i].val);
      j++;
    }

  comb->n = j;
  if (comb->n < MAX_AFF_ELTS && comb->rest)
    {
      comb->elts[comb->n].coef = double_int_one;
      comb->elts[comb->n].val = comb->rest;
      comb->rest = NULL_TREE;
      comb->n++;
    }
}
/* Splits EXPR into an affine combination of parts.  */

void
tree_to_aff_combination (tree expr, tree type, aff_tree *comb)
{
  aff_tree tmp;
  enum tree_code code;
  tree cst, core, toffset;
  HOST_WIDE_INT bitpos, bitsize;
  enum machine_mode mode;
  int unsignedp, volatilep;

  STRIP_NOPS (expr);

  code = TREE_CODE (expr);
  switch (code)
    {
    case INTEGER_CST:
      aff_combination_const (comb, type, tree_to_double_int (expr));
      return;

    case PLUS_EXPR:
    case MINUS_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), type, &tmp);
      if (code == MINUS_EXPR)
        aff_combination_scale (&tmp, double_int_minus_one);
      aff_combination_add (comb, &tmp);
      return;

    case MULT_EXPR:
      cst = TREE_OPERAND (expr, 1);
      if (TREE_CODE (cst) != INTEGER_CST)
        break;
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, tree_to_double_int (cst));
      return;

    case NEGATE_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, double_int_minus_one);
      return;

    case BIT_NOT_EXPR:
      /* ~x = -x - 1 */
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, double_int_minus_one);
      aff_combination_add_cst (comb, double_int_minus_one);
      return;

    case ADDR_EXPR:
      core = get_inner_reference (TREE_OPERAND (expr, 0), &bitsize, &bitpos,
                                  &toffset, &mode, &unsignedp, &volatilep,
                                  false);
      if (bitpos % BITS_PER_UNIT != 0)
        break;
      aff_combination_const (comb, type,
                             uhwi_to_double_int (bitpos / BITS_PER_UNIT));
      core = build_fold_addr_expr (core);
      if (TREE_CODE (core) == ADDR_EXPR)
        aff_combination_add_elt (comb, core, double_int_one);
      else
        {
          tree_to_aff_combination (core, type, &tmp);
          aff_combination_add (comb, &tmp);
        }
      if (toffset)
        {
          tree_to_aff_combination (toffset, type, &tmp);
          aff_combination_add (comb, &tmp);
        }
      return;

    default:
      break;
    }

  aff_combination_elt (comb, type, expr);
}
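/* Illustrative sketch, not part of the original tree-affine.c: what the
   decomposition produces and how a client might inspect it.  For an
   EXPR such as  i * 4 + 3  in integer type TYPE, COMB ends up with
   offset 3 and one element { val = i, coef = 4 }; for &a[i] it records
   the base address &a with coefficient 1 plus i scaled by the element
   size.  Anything that does not fit into MAX_AFF_ELTS elements is kept
   symbolically in COMB->rest.  The helper below recognizes
   EXPR == BASE + constant; it assumes the offset fits in the low
   HOST_WIDE_INT word.  */

static bool ATTRIBUTE_UNUSED
example_is_base_plus_constant (tree expr, tree type, tree base,
                               HOST_WIDE_INT *offset)
{
  aff_tree comb;

  tree_to_aff_combination (expr, type, &comb);

  /* Exactly one element, equal to BASE with coefficient one, and no
     overflow term in comb.rest.  */
  if (comb.n != 1
      || comb.rest
      || !double_int_one_p (comb.elts[0].coef)
      || !operand_equal_p (comb.elts[0].val, base, 0))
    return false;

  *offset = (HOST_WIDE_INT) comb.offset.low;
  return true;
}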
/* Creates EXPR + ELT * SCALE in TYPE.  EXPR is taken from affine
   combination COMB.  */

static tree
add_elt_to_tree (tree expr, tree type, tree elt, double_int scale,
                 aff_tree *comb)
{
  enum tree_code code;

  scale = double_int_ext_for_comb (scale, comb);
  elt = fold_convert (type, elt);

  if (double_int_one_p (scale))
    {
      if (!expr)
        return elt;

      return fold_build2 (PLUS_EXPR, type, expr, elt);
    }

  if (double_int_minus_one_p (scale))
    {
      if (!expr)
        return fold_build1 (NEGATE_EXPR, type, elt);

      return fold_build2 (MINUS_EXPR, type, expr, elt);
    }

  if (!expr)
    return fold_build2 (MULT_EXPR, type, elt,
                        double_int_to_tree (type, scale));

  if (double_int_negative_p (scale))
    {
      code = MINUS_EXPR;
      scale = double_int_neg (scale);
    }
  else
    code = PLUS_EXPR;

  elt = fold_build2 (MULT_EXPR, type, elt,
                     double_int_to_tree (type, scale));
  return fold_build2 (code, type, expr, elt);
}
/* Makes tree from the affine combination COMB.  */

tree
aff_combination_to_tree (aff_tree *comb)
{
  tree type = comb->type;
  tree expr = comb->rest;
  unsigned i;
  double_int off, sgn;

  gcc_assert (comb->n == MAX_AFF_ELTS || comb->rest == NULL_TREE);

  for (i = 0; i < comb->n; i++)
    expr = add_elt_to_tree (expr, type, comb->elts[i].val, comb->elts[i].coef,
                            comb);

  /* Ensure that we get x - 1, not x + (-1) or x + 0xff..f if x is
     unsigned.  */
  if (double_int_negative_p (comb->offset))
    {
      off = double_int_neg (comb->offset);
      sgn = double_int_minus_one;
    }
  else
    {
      off = comb->offset;
      sgn = double_int_one;
    }
  return add_elt_to_tree (expr, type, double_int_to_tree (type, off), sgn,
                          comb);
}
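/* Illustrative sketch, not part of the original tree-affine.c: a typical
   round trip.  Decompose EXPR, fold a constant ADDEND into the offset,
   and rebuild a tree for the result.  Assumes an integer TYPE and
   shwi_to_double_int from double-int.h; unshare_aff_combination (below)
   would be needed before emitting the result into the IL more than
   once.  */

static tree ATTRIBUTE_UNUSED
example_add_constant (tree expr, tree type, HOST_WIDE_INT addend)
{
  aff_tree comb;

  tree_to_aff_combination (expr, type, &comb);
  aff_combination_add_cst (&comb, shwi_to_double_int (addend));
  return aff_combination_to_tree (&comb);
}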
/* Copies the tree elements of COMB to ensure that they are not shared.  */

void
unshare_aff_combination (aff_tree *comb)
{
  unsigned i;

  for (i = 0; i < comb->n; i++)
    comb->elts[i].val = unshare_expr (comb->elts[i].val);
  if (comb->rest)
    comb->rest = unshare_expr (comb->rest);
}
/* Removes the M-th element from COMB.  */

void
aff_combination_remove_elt (aff_tree *comb, unsigned m)
{
  comb->n--;
  if (m <= comb->n)
    comb->elts[m] = comb->elts[comb->n];
  if (comb->rest)
    {
      comb->elts[comb->n].coef = double_int_one;
      comb->elts[comb->n].val = comb->rest;
      comb->rest = NULL_TREE;
      comb->n++;
    }
}
/* Adds C * COEF * VAL to R.  VAL may be NULL, in which case only
   C * COEF is added to R.  */

static void
aff_combination_add_product (aff_tree *c, double_int coef, tree val,
                             aff_tree *r)
{
  unsigned i;
  tree aval, type;

  for (i = 0; i < c->n; i++)
    {
      aval = c->elts[i].val;
      if (val)
        {
          type = TREE_TYPE (aval);
          aval = fold_build2 (MULT_EXPR, type, aval,
                              fold_convert (type, val));
        }

      aff_combination_add_elt (r, aval,
                               double_int_mul (coef, c->elts[i].coef));
    }

  if (c->rest)
    {
      aval = c->rest;
      if (val)
        {
          type = TREE_TYPE (aval);
          aval = fold_build2 (MULT_EXPR, type, aval,
                              fold_convert (type, val));
        }

      aff_combination_add_elt (r, aval, coef);
    }

  if (val)
    aff_combination_add_elt (r, val,
                             double_int_mul (coef, c->offset));
  else
    aff_combination_add_cst (r, double_int_mul (coef, c->offset));
}
/* Multiplies C1 by C2, storing the result to R.  */

void
aff_combination_mult (aff_tree *c1, aff_tree *c2, aff_tree *r)
{
  unsigned i;
  gcc_assert (TYPE_PRECISION (c1->type) == TYPE_PRECISION (c2->type));

  aff_combination_zero (r, c1->type);

  for (i = 0; i < c2->n; i++)
    aff_combination_add_product (c1, c2->elts[i].coef, c2->elts[i].val, r);
  if (c2->rest)
    aff_combination_add_product (c1, double_int_one, c2->rest, r);
  aff_combination_add_product (c1, c2->offset, NULL, r);
}
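/* Illustrative sketch, not part of the original tree-affine.c: multiply
   the affine forms of two expressions.  If A decomposes to x + 1 and B
   to y + 2, the result R is x*y + 2*x + y + 2, where the non-affine
   product x*y is kept as a single element whose value is the tree
   x * y.  A and B must have types of equal precision, as asserted
   above.  */

static void ATTRIBUTE_UNUSED
example_multiply (tree a, tree b, tree type, aff_tree *r)
{
  aff_tree ca, cb;

  tree_to_aff_combination (a, type, &ca);
  tree_to_aff_combination (b, type, &cb);
  aff_combination_mult (&ca, &cb, r);
}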
/* Returns the element of COMB whose value is VAL, or NULL if no such
   element exists.  If IDX is not NULL, it is set to the index of VAL in
   COMB.  */

static struct aff_comb_elt *
aff_combination_find_elt (aff_tree *comb, tree val, unsigned *idx)
{
  unsigned i;

  for (i = 0; i < comb->n; i++)
    if (operand_equal_p (comb->elts[i].val, val, 0))
      {
        if (idx)
          *idx = i;

        return &comb->elts[i];
      }

  return NULL;
}
/* Element of the cache that maps ssa name NAME to its expanded form
   as an affine expression EXPANSION.  */

struct name_expansion
{
  aff_tree expansion;

  /* True if the expansion for the name is just being generated.  */
  unsigned in_progress : 1;
};
/* Similar to tree_to_aff_combination, but follows SSA name definitions
   and expands them recursively.  CACHE is used to cache the expansions
   of the ssa names, to avoid exponential time complexity for cases
   like

   a1 = a0 + a0;
   a2 = a1 + a1;
   a3 = a2 + a2;
   ...  */

void
tree_to_aff_combination_expand (tree expr, tree type, aff_tree *comb,
                                struct pointer_map_t **cache)
{
  unsigned i;
  aff_tree to_add, current, curre;
  tree e, def, rhs;
  double_int scale;
  void **slot;
  struct name_expansion *exp;

  tree_to_aff_combination (expr, type, comb);
  aff_combination_zero (&to_add, type);
  for (i = 0; i < comb->n; i++)
    {
      e = comb->elts[i].val;
      if (TREE_CODE (e) != SSA_NAME)
        continue;
      def = SSA_NAME_DEF_STMT (e);
      if (TREE_CODE (def) != GIMPLE_MODIFY_STMT
          || GIMPLE_STMT_OPERAND (def, 0) != e)
        continue;

      rhs = GIMPLE_STMT_OPERAND (def, 1);
      if (TREE_CODE (rhs) != SSA_NAME
          && !EXPR_P (rhs)
          && !is_gimple_min_invariant (rhs))
        continue;

      /* We do not know whether the reference retains its value at the
         place where the expansion is used.  */
      if (REFERENCE_CLASS_P (rhs))
        continue;

      if (!*cache)
        *cache = pointer_map_create ();
      slot = pointer_map_insert (*cache, e);
      exp = *slot;

      if (!exp)
        {
          exp = XNEW (struct name_expansion);
          exp->in_progress = 1;
          *slot = exp;
          tree_to_aff_combination_expand (rhs, type, &current, cache);
          exp->expansion = current;
          exp->in_progress = 0;
        }
      else
        {
          /* Since we follow the definitions in the SSA form, we should not
             enter a cycle unless we pass through a phi node.  */
          gcc_assert (!exp->in_progress);
          current = exp->expansion;
        }

      /* Accumulate the new terms to TO_ADD, so that we do not modify
         COMB while traversing it; include the term -coef * E, to remove
         it from COMB.  */
      scale = comb->elts[i].coef;
      aff_combination_zero (&curre, type);
      aff_combination_add_elt (&curre, e, double_int_neg (scale));
      aff_combination_scale (&current, scale);
      aff_combination_add (&to_add, &current);
      aff_combination_add (&to_add, &curre);
    }
  aff_combination_add (comb, &to_add);
}
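/* Illustrative sketch, not part of the original tree-affine.c: expanding
   through SSA definitions.  If a3 is defined by a3 = a2 + a2 and a2 by
   a2 = a1 + a1, expanding a3 + 1 yields 4*a1 + 1 (assuming a1 itself
   has no further arithmetic definition, e.g. it is a PHI result).  The
   cache must be released with free_affine_expand_cache (below) once all
   expansions sharing it are done.  */

static void ATTRIBUTE_UNUSED
example_expand (tree expr, tree type, aff_tree *comb)
{
  struct pointer_map_t *cache = NULL;

  tree_to_aff_combination_expand (expr, type, comb, &cache);
  /* ... use COMB ...  */
  free_affine_expand_cache (&cache);
}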
/* Frees memory occupied by struct name_expansion in *VALUE.  Callback for
   pointer_map_traverse.  */

static bool
free_name_expansion (void *key ATTRIBUTE_UNUSED, void **value,
                     void *data ATTRIBUTE_UNUSED)
{
  struct name_expansion *exp = *value;

  free (exp);
  return true;
}
/* Frees memory allocated for the CACHE used by
   tree_to_aff_combination_expand.  */

void
free_affine_expand_cache (struct pointer_map_t **cache)
{
  if (!*cache)
    return;

  pointer_map_traverse (*cache, free_name_expansion, NULL);
  pointer_map_destroy (*cache);
  *cache = NULL;
}
/* Returns false if VAL is not equal to CST * DIV for any constant CST.
   If VAL is zero, returns true without changing *MULT.  Otherwise, if
   *MULT_SET is true, CST is also compared with *MULT, and false is
   returned if they differ.  Finally, CST is stored to *MULT and
   *MULT_SET is set to true.  */

static bool
double_int_constant_multiple_p (double_int val, double_int div,
                                bool *mult_set, double_int *mult)
{
  double_int rem, cst;

  if (double_int_zero_p (val))
    return true;

  if (double_int_zero_p (div))
    return false;

  cst = double_int_sdivmod (val, div, FLOOR_DIV_EXPR, &rem);
  if (!double_int_zero_p (rem))
    return false;

  if (*mult_set && !double_int_equal_p (*mult, cst))
    return false;

  *mult_set = true;
  *mult = cst;
  return true;
}
/* Returns true if VAL = X * DIV for some constant X.  If this is the case,
   X is stored to MULT.  */

bool
aff_combination_constant_multiple_p (aff_tree *val, aff_tree *div,
                                     double_int *mult)
{
  bool mult_set = false;
  unsigned i;

  if (val->n == 0 && double_int_zero_p (val->offset))
    {
      *mult = double_int_zero;
      return true;
    }
  if (val->n != div->n)
    return false;

  if (val->rest || div->rest)
    return false;

  if (!double_int_constant_multiple_p (val->offset, div->offset,
                                       &mult_set, mult))
    return false;

  for (i = 0; i < div->n; i++)
    {
      struct aff_comb_elt *elt
        = aff_combination_find_elt (val, div->elts[i].val, NULL);
      if (!elt)
        return false;
      if (!double_int_constant_multiple_p (elt->coef, div->elts[i].coef,
                                           &mult_set, mult))
        return false;
    }

  gcc_assert (mult_set);
  return true;
}
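/* Illustrative sketch, not part of the original tree-affine.c: a typical
   query in the spirit of induction-variable analysis.  It asks whether
   the difference of two expressions is a constant multiple of a step;
   e.g. for A = base + 8*i, B = base and STEP = 4*i it stores 2 to *MULT
   and returns true.  A, B and STEP are assumed to be trees of integer
   type TYPE.  */

static bool ATTRIBUTE_UNUSED
example_constant_multiple (tree a, tree b, tree step, tree type,
                           double_int *mult)
{
  aff_tree ca, cb, cstep;

  tree_to_aff_combination (a, type, &ca);
  tree_to_aff_combination (b, type, &cb);
  tree_to_aff_combination (step, type, &cstep);

  /* ca = a - b  */
  aff_combination_scale (&cb, double_int_minus_one);
  aff_combination_add (&ca, &cb);

  return aff_combination_constant_multiple_p (&ca, &cstep, mult);
}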