/* Operations with affine combinations of trees.
   Copyright (C) 2005, 2007, 2008, 2010 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "output.h"
#include "tree-pretty-print.h"
#include "tree-dump.h"
#include "pointer-set.h"
#include "tree-affine.h"
#include "gimple.h"
#include "flags.h"

/* Extends CST as appropriate for the affine combination COMB.  */

double_int
double_int_ext_for_comb (double_int cst, aff_tree *comb)
{
  return double_int_sext (cst, TYPE_PRECISION (comb->type));
}

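/* All constants in an affine combination are thus kept sign-extended to
   the precision of COMB->type, so offset and coefficient arithmetic
   behaves modulo 2^precision.  A minimal sketch (assuming COMB has an
   8-bit type):

     double_int v
       = double_int_ext_for_comb (uhwi_to_double_int (0xff), comb);

   after which v equals double_int_minus_one.  */
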
/* Initializes affine combination COMB so that its value is zero in TYPE.  */

static void
aff_combination_zero (aff_tree *comb, tree type)
{
  comb->type = type;
  comb->offset = double_int_zero;
  comb->n = 0;
  comb->rest = NULL_TREE;
}

/* Sets COMB to CST.  */

void
aff_combination_const (aff_tree *comb, tree type, double_int cst)
{
  aff_combination_zero (comb, type);

  comb->offset = double_int_ext_for_comb (cst, comb);
}

/* Sets COMB to the single element ELT.  */

void
aff_combination_elt (aff_tree *comb, tree type, tree elt)
{
  aff_combination_zero (comb, type);

  comb->n = 1;
  comb->elts[0].val = elt;
  comb->elts[0].coef = double_int_one;
}

/* Scales COMB by SCALE.  */

void
aff_combination_scale (aff_tree *comb, double_int scale)
{
  unsigned i, j;

  scale = double_int_ext_for_comb (scale, comb);
  if (double_int_one_p (scale))
    return;

  if (double_int_zero_p (scale))
    {
      aff_combination_zero (comb, comb->type);
      return;
    }

  comb->offset
    = double_int_ext_for_comb (double_int_mul (scale, comb->offset), comb);
  for (i = 0, j = 0; i < comb->n; i++)
    {
      double_int new_coef;

      new_coef
	= double_int_ext_for_comb (double_int_mul (scale, comb->elts[i].coef),
				   comb);
      /* A coefficient may become zero due to overflow.  Remove the zero
	 elements.  */
      if (double_int_zero_p (new_coef))
	continue;
      comb->elts[j].coef = new_coef;
      comb->elts[j].val = comb->elts[i].val;
      j++;
    }
  comb->n = j;

  if (comb->rest)
    {
      tree type = comb->type;
      if (POINTER_TYPE_P (type))
	type = sizetype;
      if (comb->n < MAX_AFF_ELTS)
	{
	  comb->elts[comb->n].coef = scale;
	  comb->elts[comb->n].val = comb->rest;
	  comb->rest = NULL_TREE;
	  comb->n++;
	}
      else
	comb->rest = fold_build2 (MULT_EXPR, type, comb->rest,
				  double_int_to_tree (type, scale));
    }
}

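/* A minimal usage sketch for the constructors above together with
   scaling (assuming X is an integer-typed tree in scope): build
   3*x + 5, then scale it by 2 to get 6*x + 10.

     aff_tree c, t;

     aff_combination_elt (&c, integer_type_node, x);
     aff_combination_scale (&c, shwi_to_double_int (3));
     aff_combination_const (&t, integer_type_node, shwi_to_double_int (5));
     aff_combination_add (&c, &t);
     aff_combination_scale (&c, shwi_to_double_int (2));

   aff_combination_add is defined below; note that a coefficient that
   wraps to zero in the precision of the combination is dropped rather
   than kept, as the loop above shows.  */
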
/* Adds ELT * SCALE to COMB.  */

void
aff_combination_add_elt (aff_tree *comb, tree elt, double_int scale)
{
  unsigned i;
  tree type;

  scale = double_int_ext_for_comb (scale, comb);
  if (double_int_zero_p (scale))
    return;

  for (i = 0; i < comb->n; i++)
    if (operand_equal_p (comb->elts[i].val, elt, 0))
      {
	double_int new_coef;

	new_coef = double_int_add (comb->elts[i].coef, scale);
	new_coef = double_int_ext_for_comb (new_coef, comb);
	if (!double_int_zero_p (new_coef))
	  {
	    comb->elts[i].coef = new_coef;
	    return;
	  }

	comb->n--;
	comb->elts[i] = comb->elts[comb->n];

	if (comb->rest)
	  {
	    gcc_assert (comb->n == MAX_AFF_ELTS - 1);
	    comb->elts[comb->n].coef = double_int_one;
	    comb->elts[comb->n].val = comb->rest;
	    comb->rest = NULL_TREE;
	    comb->n++;
	  }
	return;
      }
  if (comb->n < MAX_AFF_ELTS)
    {
      comb->elts[comb->n].coef = scale;
      comb->elts[comb->n].val = elt;
      comb->n++;
      return;
    }

  type = comb->type;
  if (POINTER_TYPE_P (type))
    type = sizetype;

  if (double_int_one_p (scale))
    elt = fold_convert (type, elt);
  else
    elt = fold_build2 (MULT_EXPR, type,
		       fold_convert (type, elt),
		       double_int_to_tree (type, scale));

  if (comb->rest)
    comb->rest = fold_build2 (PLUS_EXPR, type, comb->rest,
			      elt);
  else
    comb->rest = elt;
}

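/* Adding an element first merges it with an existing occurrence of the
   same value, so terms can cancel exactly.  A sketch (assuming X is an
   integer-typed tree in scope):

     aff_tree c;

     aff_combination_elt (&c, integer_type_node, x);
     aff_combination_add_elt (&c, x, double_int_minus_one);

   leaves c equal to zero.  Once MAX_AFF_ELTS distinct values are in
   use, further terms are folded into the catch-all tree COMB->rest
   instead of the ELTS array.  */
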
/* Adds CST to C.  */

static void
aff_combination_add_cst (aff_tree *c, double_int cst)
{
  c->offset = double_int_ext_for_comb (double_int_add (c->offset, cst), c);
}

/* Adds COMB2 to COMB1.  */

void
aff_combination_add (aff_tree *comb1, aff_tree *comb2)
{
  unsigned i;

  aff_combination_add_cst (comb1, comb2->offset);
  for (i = 0; i < comb2->n; i++)
    aff_combination_add_elt (comb1, comb2->elts[i].val, comb2->elts[i].coef);
  if (comb2->rest)
    aff_combination_add_elt (comb1, comb2->rest, double_int_one);
}

/* Converts affine combination COMB to TYPE.  */

void
aff_combination_convert (aff_tree *comb, tree type)
{
  unsigned i, j;
  tree comb_type = comb->type;

  if (TYPE_PRECISION (type) > TYPE_PRECISION (comb_type))
    {
      tree val = fold_convert (type, aff_combination_to_tree (comb));
      tree_to_aff_combination (val, type, comb);
      return;
    }

  comb->type = type;
  if (comb->rest && !POINTER_TYPE_P (type))
    comb->rest = fold_convert (type, comb->rest);

  if (TYPE_PRECISION (type) == TYPE_PRECISION (comb_type))
    return;

  comb->offset = double_int_ext_for_comb (comb->offset, comb);
  for (i = j = 0; i < comb->n; i++)
    {
      double_int new_coef = double_int_ext_for_comb (comb->elts[i].coef, comb);
      if (double_int_zero_p (new_coef))
	continue;
      comb->elts[j].coef = new_coef;
      comb->elts[j].val = fold_convert (type, comb->elts[i].val);
      j++;
    }

  comb->n = j;
  if (comb->n < MAX_AFF_ELTS && comb->rest)
    {
      comb->elts[comb->n].coef = double_int_one;
      comb->elts[comb->n].val = comb->rest;
      comb->rest = NULL_TREE;
      comb->n++;
    }
}

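/* A sketch of the narrowing direction (assuming C currently has a
   32-bit type and contains an element with coefficient 256):

     aff_combination_convert (&c, signed_char_type_node);

   re-extends 256 in 8-bit precision, yielding a zero coefficient, and
   the loop above then drops the element entirely.  Widening instead
   rebuilds the combination from a re-materialized tree, since the
   wider coefficients cannot be recovered from the truncated ones.  */
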
/* Splits EXPR into an affine combination of parts.  */

void
tree_to_aff_combination (tree expr, tree type, aff_tree *comb)
{
  aff_tree tmp;
  enum tree_code code;
  tree cst, core, toffset;
  HOST_WIDE_INT bitpos, bitsize;
  enum machine_mode mode;
  int unsignedp, volatilep;

  STRIP_NOPS (expr);

  code = TREE_CODE (expr);
  switch (code)
    {
    case INTEGER_CST:
      aff_combination_const (comb, type, tree_to_double_int (expr));
      return;

    case POINTER_PLUS_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
      aff_combination_add (comb, &tmp);
      return;

    case PLUS_EXPR:
    case MINUS_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), type, &tmp);
      if (code == MINUS_EXPR)
	aff_combination_scale (&tmp, double_int_minus_one);
      aff_combination_add (comb, &tmp);
      return;

    case MULT_EXPR:
      cst = TREE_OPERAND (expr, 1);
      if (TREE_CODE (cst) != INTEGER_CST)
	break;
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, tree_to_double_int (cst));
      return;

    case NEGATE_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, double_int_minus_one);
      return;

    case BIT_NOT_EXPR:
      /* ~x = -x - 1 */
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, double_int_minus_one);
      aff_combination_add_cst (comb, double_int_minus_one);
      return;

    case ADDR_EXPR:
      /* Handle &MEM[ptr + CST] which is equivalent to POINTER_PLUS_EXPR.  */
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == MEM_REF)
	{
	  expr = TREE_OPERAND (expr, 0);
	  tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
	  tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
	  aff_combination_add (comb, &tmp);
	  return;
	}
      core = get_inner_reference (TREE_OPERAND (expr, 0), &bitsize, &bitpos,
				  &toffset, &mode, &unsignedp, &volatilep,
				  false);
      if (bitpos % BITS_PER_UNIT != 0)
	break;
      aff_combination_const (comb, type,
			     uhwi_to_double_int (bitpos / BITS_PER_UNIT));
      core = build_fold_addr_expr (core);
      if (TREE_CODE (core) == ADDR_EXPR)
	aff_combination_add_elt (comb, core, double_int_one);
      else
	{
	  tree_to_aff_combination (core, type, &tmp);
	  aff_combination_add (comb, &tmp);
	}
      if (toffset)
	{
	  tree_to_aff_combination (toffset, type, &tmp);
	  aff_combination_add (comb, &tmp);
	}
      return;

    case MEM_REF:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR)
	tree_to_aff_combination (TREE_OPERAND (TREE_OPERAND (expr, 0), 0),
				 type, comb);
      else if (integer_zerop (TREE_OPERAND (expr, 1)))
	{
	  aff_combination_elt (comb, type, expr);
	  return;
	}
      else
	aff_combination_elt (comb, type,
			     build2 (MEM_REF, TREE_TYPE (expr),
				     TREE_OPERAND (expr, 0),
				     build_int_cst
				      (TREE_TYPE (TREE_OPERAND (expr, 1)), 0)));
      tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
      aff_combination_add (comb, &tmp);
      return;

    default:
      break;
    }

  aff_combination_elt (comb, type, expr);
}

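/* A sketch of the decomposition (assuming ADDR is a tree for the
   expression p + 4*i - 7, with p of pointer type):

     aff_tree c;

     tree_to_aff_combination (addr, ptr_type_node, &c);

   yields c.offset = -7 and the two elements {val = p, coef = 1} and
   {val = i, coef = 4}.  Subexpressions the switch cannot decompose,
   such as a load or a call result, become a single element with
   coefficient one via the fall-through at the end.  */
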
/* Creates EXPR + ELT * SCALE in TYPE.  EXPR is taken from affine
   combination COMB.  */

static tree
add_elt_to_tree (tree expr, tree type, tree elt, double_int scale,
		 aff_tree *comb)
{
  enum tree_code code;
  tree type1 = type;
  if (POINTER_TYPE_P (type))
    type1 = sizetype;

  scale = double_int_ext_for_comb (scale, comb);
  elt = fold_convert (type1, elt);

  if (double_int_one_p (scale))
    {
      if (!expr)
	return fold_convert (type, elt);

      if (POINTER_TYPE_P (type))
	return fold_build_pointer_plus (expr, elt);
      return fold_build2 (PLUS_EXPR, type, expr, elt);
    }

  if (double_int_minus_one_p (scale))
    {
      if (!expr)
	return fold_convert (type, fold_build1 (NEGATE_EXPR, type1, elt));

      if (POINTER_TYPE_P (type))
	{
	  elt = fold_build1 (NEGATE_EXPR, type1, elt);
	  return fold_build_pointer_plus (expr, elt);
	}
      return fold_build2 (MINUS_EXPR, type, expr, elt);
    }

  if (!expr)
    return fold_convert (type,
			 fold_build2 (MULT_EXPR, type1, elt,
				      double_int_to_tree (type1, scale)));

  if (double_int_negative_p (scale))
    {
      code = MINUS_EXPR;
      scale = double_int_neg (scale);
    }
  else
    code = PLUS_EXPR;

  elt = fold_build2 (MULT_EXPR, type1, elt,
		     double_int_to_tree (type1, scale));
  if (POINTER_TYPE_P (type))
    {
      if (code == MINUS_EXPR)
	elt = fold_build1 (NEGATE_EXPR, type1, elt);
      return fold_build_pointer_plus (expr, elt);
    }
  return fold_build2 (code, type, expr, elt);
}

/* Makes tree from the affine combination COMB.  */

tree
aff_combination_to_tree (aff_tree *comb)
{
  tree type = comb->type;
  tree expr = NULL_TREE;
  unsigned i;
  double_int off, sgn;
  tree type1 = type;
  if (POINTER_TYPE_P (type))
    type1 = sizetype;

  gcc_assert (comb->n == MAX_AFF_ELTS || comb->rest == NULL_TREE);

  for (i = 0; i < comb->n; i++)
    expr = add_elt_to_tree (expr, type, comb->elts[i].val, comb->elts[i].coef,
			    comb);

  if (comb->rest)
    expr = add_elt_to_tree (expr, type, comb->rest, double_int_one, comb);

  /* Ensure that we get x - 1, not x + (-1) or x + 0xff..f if x is
     unsigned.  */
  if (double_int_negative_p (comb->offset))
    {
      off = double_int_neg (comb->offset);
      sgn = double_int_minus_one;
    }
  else
    {
      off = comb->offset;
      sgn = double_int_one;
    }
  return add_elt_to_tree (expr, type, double_int_to_tree (type1, off), sgn,
			  comb);
}

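/* Together with tree_to_aff_combination this forms a round trip:
   decompose, manipulate term by term, re-materialize.  A sketch
   (assuming EXPR and TYPE in scope):

     aff_tree c;
     tree negated;

     tree_to_aff_combination (expr, type, &c);
     aff_combination_scale (&c, double_int_minus_one);
     negated = aff_combination_to_tree (&c);

   The negative-offset special case above exists only to produce nicer
   trees, e.g. x - 1 instead of x + 0xffffffff in an unsigned 32-bit
   type.  */
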
/* Copies the tree elements of COMB to ensure that they are not shared.  */

void
unshare_aff_combination (aff_tree *comb)
{
  unsigned i;

  for (i = 0; i < comb->n; i++)
    comb->elts[i].val = unshare_expr (comb->elts[i].val);
  if (comb->rest)
    comb->rest = unshare_expr (comb->rest);
}

/* Removes the M-th element from COMB.  */

void
aff_combination_remove_elt (aff_tree *comb, unsigned m)
{
  comb->n--;
  if (m <= comb->n)
    comb->elts[m] = comb->elts[comb->n];
  if (comb->rest)
    {
      comb->elts[comb->n].coef = double_int_one;
      comb->elts[comb->n].val = comb->rest;
      comb->rest = NULL_TREE;
      comb->n++;
    }
}

/* Adds C * COEF * VAL to R.  VAL may be NULL, in which case only
   C * COEF is added to R.  */

static void
aff_combination_add_product (aff_tree *c, double_int coef, tree val,
			     aff_tree *r)
{
  unsigned i;
  tree aval, type;

  for (i = 0; i < c->n; i++)
    {
      aval = c->elts[i].val;
      if (val)
	{
	  type = TREE_TYPE (aval);
	  aval = fold_build2 (MULT_EXPR, type, aval,
			      fold_convert (type, val));
	}

      aff_combination_add_elt (r, aval,
			       double_int_mul (coef, c->elts[i].coef));
    }

  if (c->rest)
    {
      aval = c->rest;
      if (val)
	{
	  type = TREE_TYPE (aval);
	  aval = fold_build2 (MULT_EXPR, type, aval,
			      fold_convert (type, val));
	}

      aff_combination_add_elt (r, aval, coef);
    }

  if (val)
    aff_combination_add_elt (r, val,
			     double_int_mul (coef, c->offset));
  else
    aff_combination_add_cst (r, double_int_mul (coef, c->offset));
}

/* Multiplies C1 by C2, storing the result to R.  */

void
aff_combination_mult (aff_tree *c1, aff_tree *c2, aff_tree *r)
{
  unsigned i;
  gcc_assert (TYPE_PRECISION (c1->type) == TYPE_PRECISION (c2->type));

  aff_combination_zero (r, c1->type);

  for (i = 0; i < c2->n; i++)
    aff_combination_add_product (c1, c2->elts[i].coef, c2->elts[i].val, r);
  if (c2->rest)
    aff_combination_add_product (c1, double_int_one, c2->rest, r);
  aff_combination_add_product (c1, c2->offset, NULL, r);
}

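/* The product is formed by distributing C2 over C1 term by term; for
   instance, multiplying the combinations for x + 1 and y + 2 gives
   x*y + 2*x + y + 2, where the x*y term becomes a new element whose
   value is a folded MULT_EXPR tree.  A sketch (assuming A and B hold
   the two operands):

     aff_tree r;

     aff_combination_mult (&a, &b, &r);

   Only the precisions of the two types must match, as the assertion
   above checks.  */
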
/* Returns the element of COMB whose value is VAL, or NULL if no such
   element exists.  If IDX is not NULL, it is set to the index of VAL in
   COMB.  */

static struct aff_comb_elt *
aff_combination_find_elt (aff_tree *comb, tree val, unsigned *idx)
{
  unsigned i;

  for (i = 0; i < comb->n; i++)
    if (operand_equal_p (comb->elts[i].val, val, 0))
      {
	if (idx)
	  *idx = i;

	return &comb->elts[i];
      }

  return NULL;
}

/* Element of the cache that maps ssa name NAME to its expanded form
   as an affine expression EXPANSION.  */

struct name_expansion
{
  aff_tree expansion;

  /* True if the expansion for the name is just being generated.  */
  unsigned in_progress : 1;
};

/* Expands SSA names in COMB recursively.  CACHE is used to cache the
   results.  */

void
aff_combination_expand (aff_tree *comb ATTRIBUTE_UNUSED,
			struct pointer_map_t **cache ATTRIBUTE_UNUSED)
{
  unsigned i;
  aff_tree to_add, current, curre;
  tree e, rhs;
  gimple def;
  double_int scale;
  void **slot;
  struct name_expansion *exp;

  aff_combination_zero (&to_add, comb->type);
  for (i = 0; i < comb->n; i++)
    {
      tree type, name;
      enum tree_code code;

      e = comb->elts[i].val;
      type = TREE_TYPE (e);
      name = e;
      /* Look through some conversions.  */
      if (TREE_CODE (e) == NOP_EXPR
	  && (TYPE_PRECISION (type)
	      >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (e, 0)))))
	name = TREE_OPERAND (e, 0);
      if (TREE_CODE (name) != SSA_NAME)
	continue;
      def = SSA_NAME_DEF_STMT (name);
      if (!is_gimple_assign (def) || gimple_assign_lhs (def) != name)
	continue;

      code = gimple_assign_rhs_code (def);
      if (code != SSA_NAME
	  && !IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
	  && (get_gimple_rhs_class (code) != GIMPLE_SINGLE_RHS
	      || !is_gimple_min_invariant (gimple_assign_rhs1 (def))))
	continue;

      /* We do not know whether the reference retains its value at the
	 place where the expansion is used.  */
      if (TREE_CODE_CLASS (code) == tcc_reference)
	continue;

      if (!*cache)
	*cache = pointer_map_create ();
      slot = pointer_map_insert (*cache, e);
      exp = (struct name_expansion *) *slot;

      if (!exp)
	{
	  exp = XNEW (struct name_expansion);
	  exp->in_progress = 1;
	  *slot = exp;
	  /* In principle this is a generally valid folding, but
	     it is not unconditionally an optimization, so do it
	     here and not in fold_unary.  */
	  /* Convert (T1)(X *+- CST) into (T1)X *+- (T1)CST if T1 is wider
	     than the type of X and overflow for the type of X is
	     undefined.  */
	  if (e != name
	      && INTEGRAL_TYPE_P (type)
	      && INTEGRAL_TYPE_P (TREE_TYPE (name))
	      && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (name))
	      && TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (name))
	      && (code == PLUS_EXPR || code == MINUS_EXPR || code == MULT_EXPR)
	      && TREE_CODE (gimple_assign_rhs2 (def)) == INTEGER_CST)
	    rhs = fold_build2 (code, type,
			       fold_convert (type, gimple_assign_rhs1 (def)),
			       fold_convert (type, gimple_assign_rhs2 (def)));
	  else
	    {
	      rhs = gimple_assign_rhs_to_tree (def);
	      if (e != name)
		rhs = fold_convert (type, rhs);
	    }
	  tree_to_aff_combination_expand (rhs, comb->type, &current, cache);
	  exp->expansion = current;
	  exp->in_progress = 0;
	}
      else
	{
	  /* Since we follow the definitions in the SSA form, we should not
	     enter a cycle unless we pass through a phi node.  */
	  gcc_assert (!exp->in_progress);
	  current = exp->expansion;
	}

      /* Accumulate the new terms to TO_ADD, so that we do not modify
	 COMB while traversing it; include the term -coef * E, to remove
	 it from COMB.  */
      scale = comb->elts[i].coef;
      aff_combination_zero (&curre, comb->type);
      aff_combination_add_elt (&curre, e, double_int_neg (scale));
      aff_combination_scale (&current, scale);
      aff_combination_add (&to_add, &current);
      aff_combination_add (&to_add, &curre);
    }
  aff_combination_add (comb, &to_add);
}

/* Similar to tree_to_aff_combination, but follows SSA name definitions
   and expands them recursively.  CACHE is used to cache the expansions
   of the ssa names, to avoid exponential time complexity for cases
   like

   a1 = a0 + a0;
   a2 = a1 + a1;
   a3 = a2 + a2;
   ...  */

void
tree_to_aff_combination_expand (tree expr, tree type, aff_tree *comb,
				struct pointer_map_t **cache)
{
  tree_to_aff_combination (expr, type, comb);
  aff_combination_expand (comb, cache);
}

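/* A typical lifetime for the expansion cache: it starts out NULL, is
   allocated lazily on first use, and must be released with
   free_affine_expand_cache below.  A sketch (assuming EXPR1, EXPR2 and
   TYPE in scope):

     struct pointer_map_t *cache = NULL;
     aff_tree a, b;

     tree_to_aff_combination_expand (expr1, type, &a, &cache);
     tree_to_aff_combination_expand (expr2, type, &b, &cache);
     free_affine_expand_cache (&cache);

   Sharing one cache across related queries is what avoids the
   exponential blowup described above.  */
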
/* Frees memory occupied by struct name_expansion in *VALUE.  Callback for
   pointer_map_traverse.  */

static bool
free_name_expansion (const void *key ATTRIBUTE_UNUSED, void **value,
		     void *data ATTRIBUTE_UNUSED)
{
  struct name_expansion *const exp = (struct name_expansion *) *value;

  free (exp);
  return true;
}

/* Frees memory allocated for the CACHE used by
   tree_to_aff_combination_expand.  */

void
free_affine_expand_cache (struct pointer_map_t **cache)
{
  if (!*cache)
    return;

  pointer_map_traverse (*cache, free_name_expansion, NULL);
  pointer_map_destroy (*cache);
  *cache = NULL;
}

/* If there is no constant CST such that VAL == CST * DIV, returns false.
   Otherwise, if VAL != 0 (and hence CST != 0) and *MULT_SET is true,
   additionally compares CST and *MULT, and returns false if they differ.
   Otherwise returns true, and if VAL != 0, stores CST to *MULT and sets
   *MULT_SET to true.  */

static bool
double_int_constant_multiple_p (double_int val, double_int div,
				bool *mult_set, double_int *mult)
{
  double_int rem, cst;

  if (double_int_zero_p (val))
    return true;

  if (double_int_zero_p (div))
    return false;

  cst = double_int_sdivmod (val, div, FLOOR_DIV_EXPR, &rem);
  if (!double_int_zero_p (rem))
    return false;

  if (*mult_set && !double_int_equal_p (*mult, cst))
    return false;

  *mult_set = true;
  *mult = cst;
  return true;
}

/* Returns true if VAL = X * DIV for some constant X.  If this is the case,
   X is stored to MULT.  */

bool
aff_combination_constant_multiple_p (aff_tree *val, aff_tree *div,
				     double_int *mult)
{
  bool mult_set = false;
  unsigned i;

  if (val->n == 0 && double_int_zero_p (val->offset))
    {
      *mult = double_int_zero;
      return true;
    }
  if (val->n != div->n)
    return false;

  if (val->rest || div->rest)
    return false;

  if (!double_int_constant_multiple_p (val->offset, div->offset,
				       &mult_set, mult))
    return false;

  for (i = 0; i < div->n; i++)
    {
      struct aff_comb_elt *elt
	= aff_combination_find_elt (val, div->elts[i].val, NULL);
      if (!elt)
	return false;
      if (!double_int_constant_multiple_p (elt->coef, div->elts[i].coef,
					   &mult_set, mult))
	return false;
    }

  gcc_assert (mult_set);
  return true;
}

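/* For instance, if VAL represents 8*x + 4 and DIV represents 2*x + 1,
   both the offsets and the coefficients of x divide with quotient 4,
   so this returns true with *MULT set to 4.  For 8*x + 6 it would
   return false, since the offsets would yield quotient 6 while the
   coefficients yield 4.  */
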
/* Prints the affine VAL to the FILE.  */

static void
print_aff (FILE *file, aff_tree *val)
{
  unsigned i;
  bool uns = TYPE_UNSIGNED (val->type);
  if (POINTER_TYPE_P (val->type))
    uns = false;
  fprintf (file, "{\n  type = ");
  print_generic_expr (file, val->type, TDF_VOPS|TDF_MEMSYMS);
  fprintf (file, "\n  offset = ");
  dump_double_int (file, val->offset, uns);
  if (val->n > 0)
    {
      fprintf (file, "\n  elements = {\n");
      for (i = 0; i < val->n; i++)
	{
	  fprintf (file, "    [%d] = ", i);
	  print_generic_expr (file, val->elts[i].val, TDF_VOPS|TDF_MEMSYMS);

	  fprintf (file, " * ");
	  dump_double_int (file, val->elts[i].coef, uns);
	  if (i != val->n - 1)
	    fprintf (file, ", \n");
	}
      fprintf (file, "\n  }");
    }
  if (val->rest)
    {
      fprintf (file, "\n  rest = ");
      print_generic_expr (file, val->rest, TDF_VOPS|TDF_MEMSYMS);
    }
  fprintf (file, "\n}");
}

/* Prints the affine VAL to the standard error, used for debugging.  */

DEBUG_FUNCTION void
debug_aff (aff_tree *val)
{
  print_aff (stderr, val);
  fprintf (stderr, "\n");
}

/* Returns the address of the reference REF in ADDR.  The size of the
   accessed location is stored to SIZE.  */

void
get_inner_reference_aff (tree ref, aff_tree *addr, double_int *size)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree toff;
  enum machine_mode mode;
  int uns, vol;
  aff_tree tmp;
  tree base = get_inner_reference (ref, &bitsize, &bitpos, &toff, &mode,
				   &uns, &vol, false);
  tree base_addr = build_fold_addr_expr (base);

  /* ADDR = &BASE + TOFF + BITPOS / BITS_PER_UNIT.  */

  tree_to_aff_combination (base_addr, sizetype, addr);

  if (toff)
    {
      tree_to_aff_combination (toff, sizetype, &tmp);
      aff_combination_add (addr, &tmp);
    }

  aff_combination_const (&tmp, sizetype,
			 shwi_to_double_int (bitpos / BITS_PER_UNIT));
  aff_combination_add (addr, &tmp);

  *size = shwi_to_double_int ((bitsize + BITS_PER_UNIT - 1) / BITS_PER_UNIT);
}

/* Returns true if a region of size SIZE1 at position 0 and a region of
   size SIZE2 at position DIFF cannot overlap.  */

bool
aff_comb_cannot_overlap_p (aff_tree *diff, double_int size1, double_int size2)
{
  double_int d, bound;

  /* Unless the difference is a constant, we fail.  */
  if (diff->n != 0)
    return false;

  d = diff->offset;
  if (double_int_negative_p (d))
    {
      /* The second object is before the first one; we succeed if the last
	 element of the second object is before the start of the first one.  */
      bound = double_int_add (d, double_int_add (size2, double_int_minus_one));
      return double_int_negative_p (bound);
    }
  else
    {
      /* We succeed if the second object starts after the first one ends.  */
      return double_int_scmp (size1, d) <= 0;
    }
}

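/* The last two routines combine into a simple memory disambiguation
   test, mirroring how callers elsewhere in GCC (e.g. the loop
   invariant motion pass) use them.  A sketch (assuming REF1 and REF2
   are memory reference trees):

     aff_tree off1, off2;
     double_int size1, size2;

     get_inner_reference_aff (ref1, &off1, &size1);
     get_inner_reference_aff (ref2, &off2, &size2);
     aff_combination_scale (&off1, double_int_minus_one);
     aff_combination_add (&off2, &off1);
     if (aff_comb_cannot_overlap_p (&off2, size1, size2))
       the two accesses cannot touch the same bytes.  */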