/* Operations with affine combinations of trees.
   Copyright (C) 2005-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "tree-pretty-print.h"
#include "pointer-set.h"
#include "tree-affine.h"
#include "gimple.h"
#include "gimplify.h"
#include "flags.h"
#include "dumpfile.h"
#include "cfgexpand.h"
/* Extends CST as appropriate for the affine combination COMB.  */

double_int
double_int_ext_for_comb (double_int cst, aff_tree *comb)
{
  return cst.sext (TYPE_PRECISION (comb->type));
}
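/* For instance, in a combination whose type has 8-bit precision, the
   constant 0xff is canonicalized by this sign extension to -1; the
   double_int arithmetic below thus behaves as arithmetic modulo
   2^TYPE_PRECISION with a canonical signed representative.  */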
/* Initializes affine combination COMB so that its value is zero in TYPE.  */

static void
aff_combination_zero (aff_tree *comb, tree type)
{
  comb->type = type;
  comb->offset = double_int_zero;
  comb->n = 0;
  comb->rest = NULL_TREE;
}
/* Sets COMB to CST.  */

void
aff_combination_const (aff_tree *comb, tree type, double_int cst)
{
  aff_combination_zero (comb, type);
  comb->offset = double_int_ext_for_comb (cst, comb);
}
/* Sets COMB to single element ELT.  */

void
aff_combination_elt (aff_tree *comb, tree type, tree elt)
{
  aff_combination_zero (comb, type);

  comb->n = 1;
  comb->elts[0].val = elt;
  comb->elts[0].coef = double_int_one;
}
/* Scales COMB by SCALE.  */

void
aff_combination_scale (aff_tree *comb, double_int scale)
{
  unsigned i, j;

  scale = double_int_ext_for_comb (scale, comb);
  if (scale.is_one ())
    return;

  if (scale.is_zero ())
    {
      aff_combination_zero (comb, comb->type);
      return;
    }

  comb->offset
    = double_int_ext_for_comb (scale * comb->offset, comb);
  for (i = 0, j = 0; i < comb->n; i++)
    {
      double_int new_coef;

      new_coef
	= double_int_ext_for_comb (scale * comb->elts[i].coef, comb);
      /* A coefficient may become zero due to overflow.  Remove the zero
	 elements.  */
      if (new_coef.is_zero ())
	continue;
      comb->elts[j].coef = new_coef;
      comb->elts[j].val = comb->elts[i].val;
      j++;
    }
  comb->n = j;

  if (comb->rest)
    {
      tree type = comb->type;
      if (POINTER_TYPE_P (type))
	type = sizetype;
      if (comb->n < MAX_AFF_ELTS)
	{
	  comb->elts[comb->n].coef = scale;
	  comb->elts[comb->n].val = comb->rest;
	  comb->rest = NULL_TREE;
	  comb->n++;
	}
      else
	comb->rest = fold_build2 (MULT_EXPR, type, comb->rest,
				  double_int_to_tree (type, scale));
    }
}
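/* As an illustration, scaling the combination 3 + 2*x by -2 in a 32-bit
   type yields -6 + -4*x, while scaling 1 + 2^31*x by 2 in the same type
   drops the x term entirely, since its coefficient overflows to zero.  */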
/* Adds ELT * SCALE to COMB.  */

void
aff_combination_add_elt (aff_tree *comb, tree elt, double_int scale)
{
  unsigned i;
  tree type;

  scale = double_int_ext_for_comb (scale, comb);
  if (scale.is_zero ())
    return;

  for (i = 0; i < comb->n; i++)
    if (operand_equal_p (comb->elts[i].val, elt, 0))
      {
	double_int new_coef;

	new_coef = comb->elts[i].coef + scale;
	new_coef = double_int_ext_for_comb (new_coef, comb);
	if (!new_coef.is_zero ())
	  {
	    comb->elts[i].coef = new_coef;
	    return;
	  }

	comb->n--;
	comb->elts[i] = comb->elts[comb->n];

	if (comb->rest)
	  {
	    gcc_assert (comb->n == MAX_AFF_ELTS - 1);
	    comb->elts[comb->n].coef = double_int_one;
	    comb->elts[comb->n].val = comb->rest;
	    comb->rest = NULL_TREE;
	    comb->n++;
	  }
	return;
      }
  if (comb->n < MAX_AFF_ELTS)
    {
      comb->elts[comb->n].coef = scale;
      comb->elts[comb->n].val = elt;
      comb->n++;
      return;
    }

  type = comb->type;
  if (POINTER_TYPE_P (type))
    type = sizetype;

  if (scale.is_one ())
    elt = fold_convert (type, elt);
  else
    elt = fold_build2 (MULT_EXPR, type,
		       fold_convert (type, elt),
		       double_int_to_tree (type, scale));

  if (comb->rest)
    comb->rest = fold_build2 (PLUS_EXPR, type, comb->rest,
			      elt);
  else
    comb->rest = elt;
}
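/* For example, adding y * 3 to the combination 5 + 2*y merges the
   coefficients into 5 + 5*y, and adding y * -2 to 5 + 2*y removes the
   y element altogether.  Once MAX_AFF_ELTS distinct elements exist, any
   further element is folded symbolically into COMB->rest instead.  */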
/* Adds CST to C.  */

static void
aff_combination_add_cst (aff_tree *c, double_int cst)
{
  c->offset = double_int_ext_for_comb (c->offset + cst, c);
}
/* Adds COMB2 to COMB1.  */

void
aff_combination_add (aff_tree *comb1, aff_tree *comb2)
{
  unsigned i;

  aff_combination_add_cst (comb1, comb2->offset);
  for (i = 0; i < comb2->n; i++)
    aff_combination_add_elt (comb1, comb2->elts[i].val, comb2->elts[i].coef);
  if (comb2->rest)
    aff_combination_add_elt (comb1, comb2->rest, double_int_one);
}
/* Converts affine combination COMB to TYPE.  */

void
aff_combination_convert (aff_tree *comb, tree type)
{
  unsigned i, j;
  tree comb_type = comb->type;

  if (TYPE_PRECISION (type) > TYPE_PRECISION (comb_type))
    {
      tree val = fold_convert (type, aff_combination_to_tree (comb));
      tree_to_aff_combination (val, type, comb);
      return;
    }

  comb->type = type;
  if (comb->rest && !POINTER_TYPE_P (type))
    comb->rest = fold_convert (type, comb->rest);

  if (TYPE_PRECISION (type) == TYPE_PRECISION (comb_type))
    return;

  comb->offset = double_int_ext_for_comb (comb->offset, comb);
  for (i = j = 0; i < comb->n; i++)
    {
      double_int new_coef = double_int_ext_for_comb (comb->elts[i].coef, comb);
      if (new_coef.is_zero ())
	continue;
      comb->elts[j].coef = new_coef;
      comb->elts[j].val = fold_convert (type, comb->elts[i].val);
      j++;
    }

  comb->n = j;
  if (comb->n < MAX_AFF_ELTS && comb->rest)
    {
      comb->elts[comb->n].coef = double_int_one;
      comb->elts[comb->n].val = comb->rest;
      comb->rest = NULL_TREE;
      comb->n++;
    }
}
/* Splits EXPR into an affine combination of parts.  */

void
tree_to_aff_combination (tree expr, tree type, aff_tree *comb)
{
  aff_tree tmp;
  enum tree_code code;
  tree cst, core, toffset;
  HOST_WIDE_INT bitpos, bitsize;
  enum machine_mode mode;
  int unsignedp, volatilep;

  STRIP_NOPS (expr);

  code = TREE_CODE (expr);
  switch (code)
    {
    case INTEGER_CST:
      aff_combination_const (comb, type, tree_to_double_int (expr));
      return;

    case POINTER_PLUS_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
      aff_combination_add (comb, &tmp);
      return;

    case PLUS_EXPR:
    case MINUS_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), type, &tmp);
      if (code == MINUS_EXPR)
	aff_combination_scale (&tmp, double_int_minus_one);
      aff_combination_add (comb, &tmp);
      return;

    case MULT_EXPR:
      cst = TREE_OPERAND (expr, 1);
      if (TREE_CODE (cst) != INTEGER_CST)
	break;
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, tree_to_double_int (cst));
      return;

    case NEGATE_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, double_int_minus_one);
      return;

    case BIT_NOT_EXPR:
      /* ~x = -x - 1 */
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, double_int_minus_one);
      aff_combination_add_cst (comb, double_int_minus_one);
      return;

    case ADDR_EXPR:
      /* Handle &MEM[ptr + CST] which is equivalent to POINTER_PLUS_EXPR.  */
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == MEM_REF)
	{
	  expr = TREE_OPERAND (expr, 0);
	  tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
	  tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
	  aff_combination_add (comb, &tmp);
	  return;
	}
      core = get_inner_reference (TREE_OPERAND (expr, 0), &bitsize, &bitpos,
				  &toffset, &mode, &unsignedp, &volatilep,
				  false);
      if (bitpos % BITS_PER_UNIT != 0)
	break;
      aff_combination_const (comb, type,
			     double_int::from_uhwi (bitpos / BITS_PER_UNIT));
      core = build_fold_addr_expr (core);
      if (TREE_CODE (core) == ADDR_EXPR)
	aff_combination_add_elt (comb, core, double_int_one);
      else
	{
	  tree_to_aff_combination (core, type, &tmp);
	  aff_combination_add (comb, &tmp);
	}
      if (toffset)
	{
	  tree_to_aff_combination (toffset, type, &tmp);
	  aff_combination_add (comb, &tmp);
	}
      return;

    case MEM_REF:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR)
	tree_to_aff_combination (TREE_OPERAND (TREE_OPERAND (expr, 0), 0),
				 type, comb);
      else if (integer_zerop (TREE_OPERAND (expr, 1)))
	{
	  aff_combination_elt (comb, type, expr);
	  return;
	}
      else
	aff_combination_elt (comb, type,
			     build2 (MEM_REF, TREE_TYPE (expr),
				     TREE_OPERAND (expr, 0),
				     build_int_cst
				       (TREE_TYPE (TREE_OPERAND (expr, 1)), 0)));
      tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
      aff_combination_add (comb, &tmp);
      return;

    default:
      break;
    }

  aff_combination_elt (comb, type, expr);
}
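/* As a sketch of the decomposition above: for integer SSA names x and y,
   the expression (x + 3) * 4 - y splits into

     comb->offset  = 12
     comb->elts[0] = { val = x, coef = 4 }
     comb->elts[1] = { val = y, coef = -1 }

   and any subexpression this function cannot decompose (the default case)
   becomes a single element with coefficient one.  */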
/* Creates EXPR + ELT * SCALE in TYPE.  EXPR is taken from affine
   combination COMB.  */

static tree
add_elt_to_tree (tree expr, tree type, tree elt, double_int scale,
		 aff_tree *comb)
{
  enum tree_code code;
  tree type1 = type;
  if (POINTER_TYPE_P (type))
    type1 = sizetype;

  scale = double_int_ext_for_comb (scale, comb);

  if (scale.is_minus_one ()
      && POINTER_TYPE_P (TREE_TYPE (elt)))
    {
      elt = convert_to_ptrofftype (elt);
      elt = fold_build1 (NEGATE_EXPR, TREE_TYPE (elt), elt);
      scale = double_int_one;
    }

  if (scale.is_one ())
    {
      if (!expr)
	{
	  if (POINTER_TYPE_P (TREE_TYPE (elt)))
	    return elt;
	  else
	    return fold_convert (type1, elt);
	}

      if (POINTER_TYPE_P (TREE_TYPE (expr)))
	return fold_build_pointer_plus (expr, elt);
      if (POINTER_TYPE_P (TREE_TYPE (elt)))
	return fold_build_pointer_plus (elt, expr);
      return fold_build2 (PLUS_EXPR, type1,
			  expr, fold_convert (type1, elt));
    }

  if (scale.is_minus_one ())
    {
      if (!expr)
	return fold_build1 (NEGATE_EXPR, type1,
			    fold_convert (type1, elt));

      if (POINTER_TYPE_P (TREE_TYPE (expr)))
	{
	  elt = convert_to_ptrofftype (elt);
	  elt = fold_build1 (NEGATE_EXPR, TREE_TYPE (elt), elt);
	  return fold_build_pointer_plus (expr, elt);
	}
      return fold_build2 (MINUS_EXPR, type1,
			  expr, fold_convert (type1, elt));
    }

  elt = fold_convert (type1, elt);
  if (!expr)
    return fold_build2 (MULT_EXPR, type1, elt,
			double_int_to_tree (type1, scale));

  if (scale.is_negative ())
    {
      code = MINUS_EXPR;
      scale = -scale;
    }
  else
    code = PLUS_EXPR;

  elt = fold_build2 (MULT_EXPR, type1, elt,
		     double_int_to_tree (type1, scale));
  if (POINTER_TYPE_P (TREE_TYPE (expr)))
    {
      if (code == MINUS_EXPR)
	elt = fold_build1 (NEGATE_EXPR, type1, elt);
      return fold_build_pointer_plus (expr, elt);
    }
  return fold_build2 (code, type1, expr, elt);
}
/* Makes tree from the affine combination COMB.  */

tree
aff_combination_to_tree (aff_tree *comb)
{
  tree type = comb->type;
  tree expr = NULL_TREE;
  unsigned i;
  double_int off, sgn;
  tree type1 = type;
  if (POINTER_TYPE_P (type))
    type1 = sizetype;

  gcc_assert (comb->n == MAX_AFF_ELTS || comb->rest == NULL_TREE);

  for (i = 0; i < comb->n; i++)
    expr = add_elt_to_tree (expr, type, comb->elts[i].val, comb->elts[i].coef,
			    comb);

  if (comb->rest)
    expr = add_elt_to_tree (expr, type, comb->rest, double_int_one, comb);

  /* Ensure that we get x - 1, not x + (-1) or x + 0xff..f if x is
     unsigned.  */
  if (comb->offset.is_negative ())
    {
      off = -comb->offset;
      sgn = double_int_minus_one;
    }
  else
    {
      off = comb->offset;
      sgn = double_int_one;
    }
  return add_elt_to_tree (expr, type, double_int_to_tree (type1, off), sgn,
			  comb);
}
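/* This is the inverse of tree_to_aff_combination up to folding: the
   combination 12 + 4*x + -1*y from the example above is rebuilt as
   roughly (x * 4 - y) + 12, with a negative offset special-cased so
   that an unsigned x yields x - 1 rather than x + 0xffffffff.  */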
/* Copies the tree elements of COMB to ensure that they are not shared.  */

void
unshare_aff_combination (aff_tree *comb)
{
  unsigned i;

  for (i = 0; i < comb->n; i++)
    comb->elts[i].val = unshare_expr (comb->elts[i].val);
  if (comb->rest)
    comb->rest = unshare_expr (comb->rest);
}
/* Removes the M-th element from COMB.  */

void
aff_combination_remove_elt (aff_tree *comb, unsigned m)
{
  comb->n--;
  if (m <= comb->n)
    comb->elts[m] = comb->elts[comb->n];
  if (comb->rest)
    {
      comb->elts[comb->n].coef = double_int_one;
      comb->elts[comb->n].val = comb->rest;
      comb->rest = NULL_TREE;
      comb->n++;
    }
}
/* Adds C * COEF * VAL to R.  VAL may be NULL, in which case only
   C * COEF is added to R.  */

static void
aff_combination_add_product (aff_tree *c, double_int coef, tree val,
			     aff_tree *r)
{
  unsigned i;
  tree aval, type;

  for (i = 0; i < c->n; i++)
    {
      aval = c->elts[i].val;
      if (val)
	{
	  type = TREE_TYPE (aval);
	  aval = fold_build2 (MULT_EXPR, type, aval,
			      fold_convert (type, val));
	}

      aff_combination_add_elt (r, aval, coef * c->elts[i].coef);
    }

  if (c->rest)
    {
      aval = c->rest;
      if (val)
	{
	  type = TREE_TYPE (aval);
	  aval = fold_build2 (MULT_EXPR, type, aval,
			      fold_convert (type, val));
	}

      aff_combination_add_elt (r, aval, coef);
    }

  if (val)
    aff_combination_add_elt (r, val, coef * c->offset);
  else
    aff_combination_add_cst (r, coef * c->offset);
}
/* Multiplies C1 by C2, storing the result to R.  */

void
aff_combination_mult (aff_tree *c1, aff_tree *c2, aff_tree *r)
{
  unsigned i;
  gcc_assert (TYPE_PRECISION (c1->type) == TYPE_PRECISION (c2->type));

  aff_combination_zero (r, c1->type);

  for (i = 0; i < c2->n; i++)
    aff_combination_add_product (c1, c2->elts[i].coef, c2->elts[i].val, r);
  if (c2->rest)
    aff_combination_add_product (c1, double_int_one, c2->rest, r);
  aff_combination_add_product (c1, c2->offset, NULL, r);
}
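/* For example, multiplying the combinations 1 + x and 2 + y gives
   2 + 2*x + 1*y + 1*(x * y): the product of two non-constant elements
   cannot stay linear, so x * y is entered as a fresh element whose
   value is the folded MULT_EXPR.  */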
/* Returns the element of COMB whose value is VAL, or NULL if no such
   element exists.  If IDX is not NULL, it is set to the index of VAL in
   COMB.  */

static struct aff_comb_elt *
aff_combination_find_elt (aff_tree *comb, tree val, unsigned *idx)
{
  unsigned i;

  for (i = 0; i < comb->n; i++)
    if (operand_equal_p (comb->elts[i].val, val, 0))
      {
	if (idx)
	  *idx = i;

	return &comb->elts[i];
      }

  return NULL;
}
/* Element of the cache that maps ssa name NAME to its expanded form
   as an affine expression EXPANSION.  */

struct name_expansion
{
  aff_tree expansion;

  /* True if the expansion for the name is just being generated.  */
  unsigned in_progress : 1;
};
/* Expands SSA names in COMB recursively.  CACHE is used to cache the
   results.  */

void
aff_combination_expand (aff_tree *comb,
			struct pointer_map_t **cache)
{
  unsigned i;
  aff_tree to_add, current, curre;
  tree e, rhs;
  gimple def;
  double_int scale;
  void **slot;
  struct name_expansion *exp;

  aff_combination_zero (&to_add, comb->type);
  for (i = 0; i < comb->n; i++)
    {
      tree type, name;
      enum tree_code code;

      e = comb->elts[i].val;
      type = TREE_TYPE (e);
      name = e;
      /* Look through some conversions.  */
      if (TREE_CODE (e) == NOP_EXPR
	  && (TYPE_PRECISION (type)
	      >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (e, 0)))))
	name = TREE_OPERAND (e, 0);
      if (TREE_CODE (name) != SSA_NAME)
	continue;
      def = SSA_NAME_DEF_STMT (name);
      if (!is_gimple_assign (def) || gimple_assign_lhs (def) != name)
	continue;

      code = gimple_assign_rhs_code (def);
      if (code != SSA_NAME
	  && !IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
	  && (get_gimple_rhs_class (code) != GIMPLE_SINGLE_RHS
	      || !is_gimple_min_invariant (gimple_assign_rhs1 (def))))
	continue;

      /* We do not know whether the reference retains its value at the
	 place where the expansion is used.  */
      if (TREE_CODE_CLASS (code) == tcc_reference)
	continue;

      if (!*cache)
	*cache = pointer_map_create ();
      slot = pointer_map_insert (*cache, e);
      exp = (struct name_expansion *) *slot;

      if (!exp)
	{
	  exp = XNEW (struct name_expansion);
	  exp->in_progress = 1;
	  *slot = exp;
	  /* In principle this is a generally valid folding, but
	     it is not unconditionally an optimization, so do it
	     here and not in fold_unary.  */
	  /* Convert (T1)(X *+- CST) into (T1)X *+- (T1)CST if T1 is wider
	     than the type of X and overflow for the type of X is
	     undefined.  */
	  if (e != name
	      && INTEGRAL_TYPE_P (type)
	      && INTEGRAL_TYPE_P (TREE_TYPE (name))
	      && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (name))
	      && TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (name))
	      && (code == PLUS_EXPR || code == MINUS_EXPR || code == MULT_EXPR)
	      && TREE_CODE (gimple_assign_rhs2 (def)) == INTEGER_CST)
	    rhs = fold_build2 (code, type,
			       fold_convert (type, gimple_assign_rhs1 (def)),
			       fold_convert (type, gimple_assign_rhs2 (def)));
	  else
	    {
	      rhs = gimple_assign_rhs_to_tree (def);
	      if (e != name)
		rhs = fold_convert (type, rhs);
	    }
	  tree_to_aff_combination_expand (rhs, comb->type, &current, cache);
	  exp->expansion = current;
	  exp->in_progress = 0;
	}
      else
	{
	  /* Since we follow the definitions in the SSA form, we should not
	     enter a cycle unless we pass through a phi node.  */
	  gcc_assert (!exp->in_progress);
	  current = exp->expansion;
	}

      /* Accumulate the new terms to TO_ADD, so that we do not modify
	 COMB while traversing it; include the term -coef * E, to remove
	 it from COMB.  */
      scale = comb->elts[i].coef;
      aff_combination_zero (&curre, comb->type);
      aff_combination_add_elt (&curre, e, -scale);
      aff_combination_scale (&current, scale);
      aff_combination_add (&to_add, &current);
      aff_combination_add (&to_add, &curre);
    }
  aff_combination_add (comb, &to_add);
}
/* Similar to tree_to_aff_combination, but follows SSA name definitions
   and expands them recursively.  CACHE is used to cache the expansions
   of the ssa names, to avoid exponential time complexity for cases
   like

   a1 = a0 + a0;
   a2 = a1 + a1;
   a3 = a2 + a2;
   ...  */

void
tree_to_aff_combination_expand (tree expr, tree type, aff_tree *comb,
				struct pointer_map_t **cache)
{
  tree_to_aff_combination (expr, type, comb);
  aff_combination_expand (comb, cache);
}
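/* In the example from the comment above, expanding a3 visits the
   definition of a1 only once: the expansions of a1 and a2 are memoized
   in CACHE, so a3 = a2 + a2 flattens to 8*a0 in time linear in the
   number of definitions rather than exponential.  */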
/* Frees memory occupied by struct name_expansion in *VALUE.  Callback for
   pointer_map_traverse.  */

static bool
free_name_expansion (const void *key ATTRIBUTE_UNUSED, void **value,
		     void *data ATTRIBUTE_UNUSED)
{
  struct name_expansion *const exp = (struct name_expansion *) *value;

  free (exp);
  return true;
}
/* Frees memory allocated for the CACHE used by
   tree_to_aff_combination_expand.  */

void
free_affine_expand_cache (struct pointer_map_t **cache)
{
  if (!*cache)
    return;

  pointer_map_traverse (*cache, free_name_expansion, NULL);
  pointer_map_destroy (*cache);
  *cache = NULL;
}
/* Returns false if there is no constant CST such that VAL == CST * DIV.
   Otherwise, if *MULT_SET is true, additionally compares CST and MULT,
   and if they are different, returns false.  Finally, if neither of these
   two cases occurs, true is returned, CST is stored to MULT, and MULT_SET
   is set to true.  */

static bool
double_int_constant_multiple_p (double_int val, double_int div,
				bool *mult_set, double_int *mult)
{
  double_int rem, cst;

  if (val.is_zero ())
    {
      if (*mult_set && !mult->is_zero ())
	return false;
      *mult_set = true;
      *mult = double_int_zero;
      return true;
    }

  if (div.is_zero ())
    return false;

  cst = val.sdivmod (div, FLOOR_DIV_EXPR, &rem);
  if (!rem.is_zero ())
    return false;

  if (*mult_set && *mult != cst)
    return false;

  *mult_set = true;
  *mult = cst;
  return true;
}
/* Returns true if VAL = X * DIV for some constant X.  If this is the case,
   X is stored to MULT.  */

bool
aff_combination_constant_multiple_p (aff_tree *val, aff_tree *div,
				     double_int *mult)
{
  bool mult_set = false;
  unsigned i;

  if (val->n == 0 && val->offset.is_zero ())
    {
      *mult = double_int_zero;
      return true;
    }
  if (val->n != div->n)
    return false;

  if (val->rest || div->rest)
    return false;

  if (!double_int_constant_multiple_p (val->offset, div->offset,
				       &mult_set, mult))
    return false;

  for (i = 0; i < div->n; i++)
    {
      struct aff_comb_elt *elt
	= aff_combination_find_elt (val, div->elts[i].val, NULL);
      if (!elt)
	return false;
      if (!double_int_constant_multiple_p (elt->coef, div->elts[i].coef,
					   &mult_set, mult))
	return false;
    }

  gcc_assert (mult_set);
  return true;
}
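/* For instance, VAL = 8 + 4*x and DIV = 2 + x give *MULT = 4, whereas
   VAL = 8 + 4*x and DIV = 2 + 3*x fail because the offset and the x
   coefficient would require different multipliers.  */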
/* Prints the affine VAL to the FILE.  */

static void
print_aff (FILE *file, aff_tree *val)
{
  unsigned i;
  bool uns = TYPE_UNSIGNED (val->type);
  if (POINTER_TYPE_P (val->type))
    uns = false;
  fprintf (file, "{\n  type = ");
  print_generic_expr (file, val->type, TDF_VOPS|TDF_MEMSYMS);
  fprintf (file, "\n  offset = ");
  dump_double_int (file, val->offset, uns);
  if (val->n > 0)
    {
      fprintf (file, "\n  elements = {\n");
      for (i = 0; i < val->n; i++)
	{
	  fprintf (file, "    [%d] = ", i);
	  print_generic_expr (file, val->elts[i].val, TDF_VOPS|TDF_MEMSYMS);

	  fprintf (file, " * ");
	  dump_double_int (file, val->elts[i].coef, uns);
	  if (i != val->n - 1)
	    fprintf (file, ", \n");
	}
      fprintf (file, "\n  }");
    }
  if (val->rest)
    {
      fprintf (file, "\n  rest = ");
      print_generic_expr (file, val->rest, TDF_VOPS|TDF_MEMSYMS);
    }
  fprintf (file, "\n}");
}
/* Prints the affine VAL to the standard error, used for debugging.  */

DEBUG_FUNCTION void
debug_aff (aff_tree *val)
{
  print_aff (stderr, val);
  fprintf (stderr, "\n");
}
/* Computes address of the reference REF in ADDR.  The size of the accessed
   location is stored to SIZE.  Returns the ultimate containing object to
   which REF refers.  */

tree
get_inner_reference_aff (tree ref, aff_tree *addr, double_int *size)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree toff;
  enum machine_mode mode;
  int uns, vol;
  aff_tree tmp;
  tree base = get_inner_reference (ref, &bitsize, &bitpos, &toff, &mode,
				   &uns, &vol, false);
  tree base_addr = build_fold_addr_expr (base);

  /* ADDR = &BASE + TOFF + BITPOS / BITS_PER_UNIT.  */

  tree_to_aff_combination (base_addr, sizetype, addr);

  if (toff)
    {
      tree_to_aff_combination (toff, sizetype, &tmp);
      aff_combination_add (addr, &tmp);
    }

  aff_combination_const (&tmp, sizetype,
			 double_int::from_shwi (bitpos / BITS_PER_UNIT));
  aff_combination_add (addr, &tmp);

  *size = double_int::from_shwi ((bitsize + BITS_PER_UNIT - 1) / BITS_PER_UNIT);

  return base;
}
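/* For a reference such as a.f where field f sits at byte offset 8, this
   computes ADDR = &a + 8, SIZE = the byte size of f rounded up from its
   bit size, and returns a as the base object.  */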
/* Returns true if a region of size SIZE1 at position 0 and a region of
   size SIZE2 at position DIFF cannot overlap.  */

bool
aff_comb_cannot_overlap_p (aff_tree *diff, double_int size1, double_int size2)
{
  double_int d, bound;

  /* Unless the difference is a constant, we fail.  */
  if (diff->n != 0)
    return false;

  d = diff->offset;
  if (d.is_negative ())
    {
      /* The second object is before the first one, we succeed if the last
	 element of the second object is before the start of the first one.  */
      bound = d + size2 + double_int_minus_one;
      return bound.is_negative ();
    }
  else
    {
      /* We succeed if the second object starts after the first one ends.  */
      return size1.sle (d);
    }
}
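/* For example, with DIFF = { offset = 16 }, SIZE1 = 8 and SIZE2 = 8, the
   regions [0, 8) and [16, 24) cannot overlap, so this returns true;
   with DIFF = { offset = 4 } the same sizes give overlapping regions
   and the function conservatively returns false.  */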