/* Operations with affine combinations of trees.
   Copyright (C) 2005-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "tree-pretty-print.h"
#include "pointer-set.h"
#include "tree-affine.h"
#include "gimple.h"
#include "gimplify.h"
#include "flags.h"
#include "dumpfile.h"
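
/* Reader's note: an aff_tree (see tree-affine.h) represents the value

     offset + elts[0].coef * elts[0].val + ... + elts[n-1].coef * elts[n-1].val
	    + rest

   where the coefficients and the offset are double_ints interpreted in
   the precision of TYPE, and REST, if non-NULL, collects terms that did
   not fit into the ELTS array (at most MAX_AFF_ELTS entries).  */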
/* Extends CST as appropriate for the affine combination COMB.  */

double_int
double_int_ext_for_comb (double_int cst, aff_tree *comb)
{
  return cst.sext (TYPE_PRECISION (comb->type));
}
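
/* For instance, in a combination over an 8-bit type the constant 0xff
   sign-extends to -1, so arithmetic on coefficients and offsets wraps
   the same way arithmetic in the (possibly narrower) type itself does.  */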

/* Initializes affine combination COMB so that its value is zero in TYPE.  */

static void
aff_combination_zero (aff_tree *comb, tree type)
{
  comb->type = type;
  comb->offset = double_int_zero;
  comb->n = 0;
  comb->rest = NULL_TREE;
}

/* Sets COMB to CST.  */

void
aff_combination_const (aff_tree *comb, tree type, double_int cst)
{
  aff_combination_zero (comb, type);
  comb->offset = double_int_ext_for_comb (cst, comb);
}

/* Sets COMB to single element ELT.  */

void
aff_combination_elt (aff_tree *comb, tree type, tree elt)
{
  aff_combination_zero (comb, type);

  comb->n = 1;
  comb->elts[0].val = elt;
  comb->elts[0].coef = double_int_one;
}

/* Scales COMB by SCALE.  */

void
aff_combination_scale (aff_tree *comb, double_int scale)
{
  unsigned i, j;

  scale = double_int_ext_for_comb (scale, comb);
  if (scale.is_one ())
    return;

  if (scale.is_zero ())
    {
      aff_combination_zero (comb, comb->type);
      return;
    }

  comb->offset
    = double_int_ext_for_comb (scale * comb->offset, comb);
  for (i = 0, j = 0; i < comb->n; i++)
    {
      double_int new_coef;

      new_coef
	= double_int_ext_for_comb (scale * comb->elts[i].coef, comb);
      /* A coefficient may become zero due to overflow.  Remove the zero
	 elements.  */
      if (new_coef.is_zero ())
	continue;
      comb->elts[j].coef = new_coef;
      comb->elts[j].val = comb->elts[i].val;
      j++;
    }
  comb->n = j;

  if (comb->rest)
    {
      tree type = comb->type;
      if (POINTER_TYPE_P (type))
	type = sizetype;
      if (comb->n < MAX_AFF_ELTS)
	{
	  comb->elts[comb->n].coef = scale;
	  comb->elts[comb->n].val = comb->rest;
	  comb->rest = NULL_TREE;
	  comb->n++;
	}
      else
	comb->rest = fold_build2 (MULT_EXPR, type, comb->rest,
				  double_int_to_tree (type, scale));
    }
}

/* Adds ELT * SCALE to COMB.  */

void
aff_combination_add_elt (aff_tree *comb, tree elt, double_int scale)
{
  unsigned i;
  tree type;

  scale = double_int_ext_for_comb (scale, comb);
  if (scale.is_zero ())
    return;

  for (i = 0; i < comb->n; i++)
    if (operand_equal_p (comb->elts[i].val, elt, 0))
      {
	double_int new_coef;

	new_coef = comb->elts[i].coef + scale;
	new_coef = double_int_ext_for_comb (new_coef, comb);
	if (!new_coef.is_zero ())
	  {
	    comb->elts[i].coef = new_coef;
	    return;
	  }

	comb->n--;
	comb->elts[i] = comb->elts[comb->n];

	if (comb->rest)
	  {
	    gcc_assert (comb->n == MAX_AFF_ELTS - 1);
	    comb->elts[comb->n].coef = double_int_one;
	    comb->elts[comb->n].val = comb->rest;
	    comb->rest = NULL_TREE;
	    comb->n++;
	  }
	return;
      }
  if (comb->n < MAX_AFF_ELTS)
    {
      comb->elts[comb->n].coef = scale;
      comb->elts[comb->n].val = elt;
      comb->n++;
      return;
    }

  type = comb->type;
  if (POINTER_TYPE_P (type))
    type = sizetype;

  if (scale.is_one ())
    elt = fold_convert (type, elt);
  else
    elt = fold_build2 (MULT_EXPR, type,
		       fold_convert (type, elt),
		       double_int_to_tree (type, scale));

  if (comb->rest)
    comb->rest = fold_build2 (PLUS_EXPR, type, comb->rest,
			      elt);
  else
    comb->rest = elt;
}

/* Adds CST to C.  */

static void
aff_combination_add_cst (aff_tree *c, double_int cst)
{
  c->offset = double_int_ext_for_comb (c->offset + cst, c);
}

/* Adds COMB2 to COMB1.  */

void
aff_combination_add (aff_tree *comb1, aff_tree *comb2)
{
  unsigned i;

  aff_combination_add_cst (comb1, comb2->offset);
  for (i = 0; i < comb2->n; i++)
    aff_combination_add_elt (comb1, comb2->elts[i].val, comb2->elts[i].coef);
  if (comb2->rest)
    aff_combination_add_elt (comb1, comb2->rest, double_int_one);
}
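
/* Usage sketch (illustrative only, not part of this file's interface):
   building the combination 2*X + Y - 3 through the accessors above.
   X and Y stand for arbitrary trees of an integral TYPE.  */

#if 0
static void
build_2x_plus_y_minus_3 (tree type, tree x, tree y, aff_tree *comb)
{
  aff_tree tmp;

  aff_combination_elt (comb, type, x);			 /* comb = x */
  aff_combination_scale (comb, double_int::from_shwi (2)); /* comb = 2*x */
  aff_combination_add_elt (comb, y, double_int_one);	 /* comb = 2*x + y */
  aff_combination_const (&tmp, type, double_int::from_shwi (-3));
  aff_combination_add (comb, &tmp);			 /* comb = 2*x + y - 3 */
}
#endif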

/* Converts affine combination COMB to TYPE.  */

void
aff_combination_convert (aff_tree *comb, tree type)
{
  unsigned i, j;
  tree comb_type = comb->type;

  if (TYPE_PRECISION (type) > TYPE_PRECISION (comb_type))
    {
      tree val = fold_convert (type, aff_combination_to_tree (comb));
      tree_to_aff_combination (val, type, comb);
      return;
    }

  comb->type = type;
  if (comb->rest && !POINTER_TYPE_P (type))
    comb->rest = fold_convert (type, comb->rest);

  if (TYPE_PRECISION (type) == TYPE_PRECISION (comb_type))
    return;

  comb->offset = double_int_ext_for_comb (comb->offset, comb);
  for (i = j = 0; i < comb->n; i++)
    {
      double_int new_coef = double_int_ext_for_comb (comb->elts[i].coef, comb);
      if (new_coef.is_zero ())
	continue;
      comb->elts[j].coef = new_coef;
      comb->elts[j].val = fold_convert (type, comb->elts[i].val);
      j++;
    }

  comb->n = j;
  if (comb->n < MAX_AFF_ELTS && comb->rest)
    {
      comb->elts[comb->n].coef = double_int_one;
      comb->elts[comb->n].val = comb->rest;
      comb->rest = NULL_TREE;
      comb->n++;
    }
}

/* Splits EXPR into an affine combination of parts.  */

void
tree_to_aff_combination (tree expr, tree type, aff_tree *comb)
{
  aff_tree tmp;
  enum tree_code code;
  tree cst, core, toffset;
  HOST_WIDE_INT bitpos, bitsize;
  enum machine_mode mode;
  int unsignedp, volatilep;

  STRIP_NOPS (expr);

  code = TREE_CODE (expr);
  switch (code)
    {
    case INTEGER_CST:
      aff_combination_const (comb, type, tree_to_double_int (expr));
      return;

    case POINTER_PLUS_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
      aff_combination_add (comb, &tmp);
      return;

    case PLUS_EXPR:
    case MINUS_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), type, &tmp);
      if (code == MINUS_EXPR)
	aff_combination_scale (&tmp, double_int_minus_one);
      aff_combination_add (comb, &tmp);
      return;

    case MULT_EXPR:
      cst = TREE_OPERAND (expr, 1);
      if (TREE_CODE (cst) != INTEGER_CST)
	break;
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, tree_to_double_int (cst));
      return;

    case NEGATE_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, double_int_minus_one);
      return;

    case BIT_NOT_EXPR:
      /* ~x = -x - 1 */
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, double_int_minus_one);
      aff_combination_add_cst (comb, double_int_minus_one);
      return;

    case ADDR_EXPR:
      /* Handle &MEM[ptr + CST] which is equivalent to POINTER_PLUS_EXPR.  */
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == MEM_REF)
	{
	  expr = TREE_OPERAND (expr, 0);
	  tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
	  tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
	  aff_combination_add (comb, &tmp);
	  return;
	}
      core = get_inner_reference (TREE_OPERAND (expr, 0), &bitsize, &bitpos,
				  &toffset, &mode, &unsignedp, &volatilep,
				  false);
      if (bitpos % BITS_PER_UNIT != 0)
	break;
      aff_combination_const (comb, type,
			     double_int::from_uhwi (bitpos / BITS_PER_UNIT));
      core = build_fold_addr_expr (core);
      if (TREE_CODE (core) == ADDR_EXPR)
	aff_combination_add_elt (comb, core, double_int_one);
      else
	{
	  tree_to_aff_combination (core, type, &tmp);
	  aff_combination_add (comb, &tmp);
	}
      if (toffset)
	{
	  tree_to_aff_combination (toffset, type, &tmp);
	  aff_combination_add (comb, &tmp);
	}
      return;

    case MEM_REF:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR)
	tree_to_aff_combination (TREE_OPERAND (TREE_OPERAND (expr, 0), 0),
				 type, comb);
      else if (integer_zerop (TREE_OPERAND (expr, 1)))
	{
	  aff_combination_elt (comb, type, expr);
	  return;
	}
      else
	aff_combination_elt (comb, type,
			     build2 (MEM_REF, TREE_TYPE (expr),
				     TREE_OPERAND (expr, 0),
				     build_int_cst
				       (TREE_TYPE (TREE_OPERAND (expr, 1)), 0)));
      tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
      aff_combination_add (comb, &tmp);
      return;

    default:
      break;
    }

  aff_combination_elt (comb, type, expr);
}
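
/* Worked example: decomposing (x + 3) * 4 - x yields 3*x + 12.  The
   MULT_EXPR case scales the combination of x + 3 by 4, and the
   MINUS_EXPR case adds x with coefficient -1.  Sketch only; EXPR is
   assumed to be a tree for that expression in an integral TYPE.  */

#if 0
static void
decompose_example (tree expr, tree type)
{
  aff_tree comb;

  tree_to_aff_combination (expr, type, &comb);
  /* Now comb.offset == 12, comb.n == 1 and comb.elts[0] == { x, 3 }.  */
  debug_aff (&comb);
}
#endif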

/* Creates EXPR + ELT * SCALE in TYPE.  EXPR is taken from affine
   combination COMB.  */

static tree
add_elt_to_tree (tree expr, tree type, tree elt, double_int scale,
		 aff_tree *comb)
{
  enum tree_code code;
  tree type1 = type;
  if (POINTER_TYPE_P (type))
    type1 = sizetype;

  scale = double_int_ext_for_comb (scale, comb);

  if (scale.is_minus_one ()
      && POINTER_TYPE_P (TREE_TYPE (elt)))
    {
      elt = convert_to_ptrofftype (elt);
      elt = fold_build1 (NEGATE_EXPR, TREE_TYPE (elt), elt);
      scale = double_int_one;
    }

  if (scale.is_one ())
    {
      if (!expr)
	{
	  if (POINTER_TYPE_P (TREE_TYPE (elt)))
	    return elt;
	  else
	    return fold_convert (type1, elt);
	}

      if (POINTER_TYPE_P (TREE_TYPE (expr)))
	return fold_build_pointer_plus (expr, elt);
      if (POINTER_TYPE_P (TREE_TYPE (elt)))
	return fold_build_pointer_plus (elt, expr);
      return fold_build2 (PLUS_EXPR, type1,
			  expr, fold_convert (type1, elt));
    }

  if (scale.is_minus_one ())
    {
      if (!expr)
	return fold_build1 (NEGATE_EXPR, type1,
			    fold_convert (type1, elt));

      if (POINTER_TYPE_P (TREE_TYPE (expr)))
	{
	  elt = convert_to_ptrofftype (elt);
	  elt = fold_build1 (NEGATE_EXPR, TREE_TYPE (elt), elt);
	  return fold_build_pointer_plus (expr, elt);
	}
      return fold_build2 (MINUS_EXPR, type1,
			  expr, fold_convert (type1, elt));
    }

  elt = fold_convert (type1, elt);
  if (!expr)
    return fold_build2 (MULT_EXPR, type1, elt,
			double_int_to_tree (type1, scale));

  if (scale.is_negative ())
    {
      code = MINUS_EXPR;
      scale = -scale;
    }
  else
    code = PLUS_EXPR;

  elt = fold_build2 (MULT_EXPR, type1, elt,
		     double_int_to_tree (type1, scale));
  if (POINTER_TYPE_P (TREE_TYPE (expr)))
    {
      if (code == MINUS_EXPR)
	elt = fold_build1 (NEGATE_EXPR, type1, elt);
      return fold_build_pointer_plus (expr, elt);
    }
  return fold_build2 (code, type1, expr, elt);
}

/* Makes tree from the affine combination COMB.  */

tree
aff_combination_to_tree (aff_tree *comb)
{
  tree type = comb->type;
  tree expr = NULL_TREE;
  unsigned i;
  double_int off, sgn;
  tree type1 = type;
  if (POINTER_TYPE_P (type))
    type1 = sizetype;

  gcc_assert (comb->n == MAX_AFF_ELTS || comb->rest == NULL_TREE);

  for (i = 0; i < comb->n; i++)
    expr = add_elt_to_tree (expr, type, comb->elts[i].val, comb->elts[i].coef,
			    comb);

  if (comb->rest)
    expr = add_elt_to_tree (expr, type, comb->rest, double_int_one, comb);

  /* Ensure that we get x - 1, not x + (-1) or x + 0xff..f if x is
     unsigned.  */
  if (comb->offset.is_negative ())
    {
      off = -comb->offset;
      sgn = double_int_minus_one;
    }
  else
    {
      off = comb->offset;
      sgn = double_int_one;
    }
  return add_elt_to_tree (expr, type, double_int_to_tree (type1, off), sgn,
			  comb);
}
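
/* A common pattern built on the two directions: decompose two
   expressions, subtract, and materialize the simplified difference,
   letting common terms cancel.  Sketch only; E1 and E2 are assumed to
   share the integral TYPE.  */

#if 0
static tree
affine_difference (tree e1, tree e2, tree type)
{
  aff_tree a1, a2;

  tree_to_aff_combination (e1, type, &a1);
  tree_to_aff_combination (e2, type, &a2);
  aff_combination_scale (&a2, double_int_minus_one);
  aff_combination_add (&a1, &a2);	/* a1 = e1 - e2, terms cancelled.  */
  return aff_combination_to_tree (&a1);
}
#endif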

/* Copies the tree elements of COMB to ensure that they are not shared.  */

void
unshare_aff_combination (aff_tree *comb)
{
  unsigned i;

  for (i = 0; i < comb->n; i++)
    comb->elts[i].val = unshare_expr (comb->elts[i].val);
  if (comb->rest)
    comb->rest = unshare_expr (comb->rest);
}

/* Remove M-th element from COMB.  */

void
aff_combination_remove_elt (aff_tree *comb, unsigned m)
{
  comb->n--;
  if (m <= comb->n)
    comb->elts[m] = comb->elts[comb->n];
  if (comb->rest)
    {
      comb->elts[comb->n].coef = double_int_one;
      comb->elts[comb->n].val = comb->rest;
      comb->rest = NULL_TREE;
      comb->n++;
    }
}

/* Adds C * COEF * VAL to R.  VAL may be NULL, in that case only
   C * COEF is added to R.  */

static void
aff_combination_add_product (aff_tree *c, double_int coef, tree val,
			     aff_tree *r)
{
  unsigned i;
  tree aval, type;

  for (i = 0; i < c->n; i++)
    {
      aval = c->elts[i].val;
      if (val)
	{
	  type = TREE_TYPE (aval);
	  aval = fold_build2 (MULT_EXPR, type, aval,
			      fold_convert (type, val));
	}

      aff_combination_add_elt (r, aval, coef * c->elts[i].coef);
    }

  if (c->rest)
    {
      aval = c->rest;
      if (val)
	{
	  type = TREE_TYPE (aval);
	  aval = fold_build2 (MULT_EXPR, type, aval,
			      fold_convert (type, val));
	}

      aff_combination_add_elt (r, aval, coef);
    }

  if (val)
    aff_combination_add_elt (r, val, coef * c->offset);
  else
    aff_combination_add_cst (r, coef * c->offset);
}

/* Multiplies C1 by C2, storing the result to R.  */

void
aff_combination_mult (aff_tree *c1, aff_tree *c2, aff_tree *r)
{
  unsigned i;
  gcc_assert (TYPE_PRECISION (c1->type) == TYPE_PRECISION (c2->type));

  aff_combination_zero (r, c1->type);

  for (i = 0; i < c2->n; i++)
    aff_combination_add_product (c1, c2->elts[i].coef, c2->elts[i].val, r);
  if (c2->rest)
    aff_combination_add_product (c1, double_int_one, c2->rest, r);
  aff_combination_add_product (c1, c2->offset, NULL, r);
}
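
/* Worked example: multiplying (x + 1) by (x + 2) distributes to
   x*x + 3*x + 2.  Each element of C2 multiplies all of C1 through
   aff_combination_add_product, and the non-affine product x*x becomes
   a single new element whose value is the folded MULT_EXPR.  */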

/* Returns the element of COMB whose value is VAL, or NULL if no such
   element exists.  If IDX is not NULL, it is set to the index of VAL in
   COMB.  */

static struct aff_comb_elt *
aff_combination_find_elt (aff_tree *comb, tree val, unsigned *idx)
{
  unsigned i;

  for (i = 0; i < comb->n; i++)
    if (operand_equal_p (comb->elts[i].val, val, 0))
      {
	if (idx)
	  *idx = i;

	return &comb->elts[i];
      }

  return NULL;
}

/* Element of the cache that maps ssa name NAME to its expanded form
   as an affine expression EXPANSION.  */

struct name_expansion
{
  aff_tree expansion;

  /* True if the expansion for the name is just being generated.  */
  unsigned in_progress : 1;
};

/* Expands SSA names in COMB recursively.  CACHE is used to cache the
   results.  */

void
aff_combination_expand (aff_tree *comb ATTRIBUTE_UNUSED,
			struct pointer_map_t **cache ATTRIBUTE_UNUSED)
{
  unsigned i;
  aff_tree to_add, current, curre;
  tree e, rhs;
  gimple def;
  double_int scale;
  void **slot;
  struct name_expansion *exp;

  aff_combination_zero (&to_add, comb->type);
  for (i = 0; i < comb->n; i++)
    {
      tree type, name;
      enum tree_code code;

      e = comb->elts[i].val;
      type = TREE_TYPE (e);
      name = e;
      /* Look through some conversions.  */
      if (TREE_CODE (e) == NOP_EXPR
	  && (TYPE_PRECISION (type)
	      >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (e, 0)))))
	name = TREE_OPERAND (e, 0);
      if (TREE_CODE (name) != SSA_NAME)
	continue;
      def = SSA_NAME_DEF_STMT (name);
      if (!is_gimple_assign (def) || gimple_assign_lhs (def) != name)
	continue;

      code = gimple_assign_rhs_code (def);
      if (code != SSA_NAME
	  && !IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
	  && (get_gimple_rhs_class (code) != GIMPLE_SINGLE_RHS
	      || !is_gimple_min_invariant (gimple_assign_rhs1 (def))))
	continue;

      /* We do not know whether the reference retains its value at the
	 place where the expansion is used.  */
      if (TREE_CODE_CLASS (code) == tcc_reference)
	continue;

      if (!*cache)
	*cache = pointer_map_create ();
      slot = pointer_map_insert (*cache, e);
      exp = (struct name_expansion *) *slot;

      if (!exp)
	{
	  exp = XNEW (struct name_expansion);
	  exp->in_progress = 1;
	  *slot = exp;
	  /* In principle this is a generally valid folding, but
	     it is not unconditionally an optimization, so do it
	     here and not in fold_unary.  */
	  /* Convert (T1)(X *+- CST) into (T1)X *+- (T1)CST if T1 is wider
	     than the type of X and overflow for the type of X is
	     undefined.  */
	  if (e != name
	      && INTEGRAL_TYPE_P (type)
	      && INTEGRAL_TYPE_P (TREE_TYPE (name))
	      && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (name))
	      && (code == PLUS_EXPR || code == MINUS_EXPR || code == MULT_EXPR)
	      && TREE_CODE (gimple_assign_rhs2 (def)) == INTEGER_CST)
	    rhs = fold_build2 (code, type,
			       fold_convert (type, gimple_assign_rhs1 (def)),
			       fold_convert (type, gimple_assign_rhs2 (def)));
	  else
	    {
	      rhs = gimple_assign_rhs_to_tree (def);
	      if (e != name)
		rhs = fold_convert (type, rhs);
	    }
	  tree_to_aff_combination_expand (rhs, comb->type, &current, cache);
	  exp->expansion = current;
	  exp->in_progress = 0;
	}
      else
	{
	  /* Since we follow the definitions in the SSA form, we should not
	     enter a cycle unless we pass through a phi node.  */
	  gcc_assert (!exp->in_progress);
	  current = exp->expansion;
	}

      /* Accumulate the new terms to TO_ADD, so that we do not modify
	 COMB while traversing it; include the term -coef * E, to remove
	 it from COMB.  */
      scale = comb->elts[i].coef;
      aff_combination_zero (&curre, comb->type);
      aff_combination_add_elt (&curre, e, -scale);
      aff_combination_scale (&current, scale);
      aff_combination_add (&to_add, &current);
      aff_combination_add (&to_add, &curre);
    }
  aff_combination_add (comb, &to_add);
}

/* Similar to tree_to_aff_combination, but follows SSA name definitions
   and expands them recursively.  CACHE is used to cache the expansions
   of the ssa names, to avoid exponential time complexity for cases
   like

   a1 = a0 + a0;
   a2 = a1 + a1;
   a3 = a2 + a2;
   ...  */

void
tree_to_aff_combination_expand (tree expr, tree type, aff_tree *comb,
				struct pointer_map_t **cache)
{
  tree_to_aff_combination (expr, type, comb);
  aff_combination_expand (comb, cache);
}

/* Frees memory occupied by struct name_expansion in *VALUE.  Callback for
   pointer_map_traverse.  */

static bool
free_name_expansion (const void *key ATTRIBUTE_UNUSED, void **value,
		     void *data ATTRIBUTE_UNUSED)
{
  struct name_expansion *const exp = (struct name_expansion *) *value;

  free (exp);
  return true;
}

/* Frees memory allocated for the CACHE used by
   tree_to_aff_combination_expand.  */

void
free_affine_expand_cache (struct pointer_map_t **cache)
{
  if (!*cache)
    return;

  pointer_map_traverse (*cache, free_name_expansion, NULL);
  pointer_map_destroy (*cache);
  *cache = NULL;
}
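
/* Typical lifecycle of the expansion cache: it starts out NULL, is
   allocated lazily by aff_combination_expand, may be shared across
   several expansions, and must be freed by the caller when done.
   Sketch only; EXPR and TYPE are placeholders.  */

#if 0
static void
expand_example (tree expr, tree type)
{
  struct pointer_map_t *cache = NULL;
  aff_tree comb;

  tree_to_aff_combination_expand (expr, type, &comb, &cache);
  /* ... use COMB; further expansions may reuse CACHE ...  */
  free_affine_expand_cache (&cache);
}
#endif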

/* If VAL != CST * DIV for any constant CST, returns false.
   Otherwise, if *MULT_SET is true, additionally compares CST and MULT,
   and if they are different, returns false.  Finally, if neither of these
   two cases occur, true is returned, and CST is stored to MULT and MULT_SET
   is set to true.  */

static bool
double_int_constant_multiple_p (double_int val, double_int div,
				bool *mult_set, double_int *mult)
{
  double_int rem, cst;

  if (val.is_zero ())
    {
      if (*mult_set && !mult->is_zero ())
	return false;
      *mult_set = true;
      *mult = double_int_zero;
      return true;
    }

  if (div.is_zero ())
    return false;

  cst = val.sdivmod (div, FLOOR_DIV_EXPR, &rem);
  if (!rem.is_zero ())
    return false;

  if (*mult_set && *mult != cst)
    return false;

  *mult_set = true;
  *mult = cst;
  return true;
}

/* Returns true if VAL = X * DIV for some constant X.  If this is the case,
   X is stored to MULT.  */

bool
aff_combination_constant_multiple_p (aff_tree *val, aff_tree *div,
				     double_int *mult)
{
  bool mult_set = false;
  unsigned i;

  if (val->n == 0 && val->offset.is_zero ())
    {
      *mult = double_int_zero;
      return true;
    }
  if (val->n != div->n)
    return false;

  if (val->rest || div->rest)
    return false;

  if (!double_int_constant_multiple_p (val->offset, div->offset,
				       &mult_set, mult))
    return false;

  for (i = 0; i < div->n; i++)
    {
      struct aff_comb_elt *elt
	= aff_combination_find_elt (val, div->elts[i].val, NULL);
      if (!elt)
	return false;
      if (!double_int_constant_multiple_p (elt->coef, div->elts[i].coef,
					   &mult_set, mult))
	return false;
    }

  gcc_assert (mult_set);
  return true;
}
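
/* For instance, with VAL = 8*i + 16 and DIV = 2*i + 4 this returns true
   and stores 4 to MULT; with VAL = 8*i + 12 it returns false, since the
   offset and the coefficient of i would need different multipliers
   (12 / 4 = 3 but 8 / 2 = 4).  */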

/* Prints the affine combination VAL to FILE.  */

static void
print_aff (FILE *file, aff_tree *val)
{
  unsigned i;
  bool uns = TYPE_UNSIGNED (val->type);
  if (POINTER_TYPE_P (val->type))
    uns = false;
  fprintf (file, "{\n  type = ");
  print_generic_expr (file, val->type, TDF_VOPS|TDF_MEMSYMS);
  fprintf (file, "\n  offset = ");
  dump_double_int (file, val->offset, uns);
  if (val->n > 0)
    {
      fprintf (file, "\n  elements = {\n");
      for (i = 0; i < val->n; i++)
	{
	  fprintf (file, "    [%u] = ", i);
	  print_generic_expr (file, val->elts[i].val, TDF_VOPS|TDF_MEMSYMS);

	  fprintf (file, " * ");
	  dump_double_int (file, val->elts[i].coef, uns);
	  if (i != val->n - 1)
	    fprintf (file, ", \n");
	}
      fprintf (file, "\n  }");
    }
  if (val->rest)
    {
      fprintf (file, "\n  rest = ");
      print_generic_expr (file, val->rest, TDF_VOPS|TDF_MEMSYMS);
    }
  fprintf (file, "\n}");
}

/* Prints the affine combination VAL to standard error; used for
   debugging.  */

DEBUG_FUNCTION void
debug_aff (aff_tree *val)
{
  print_aff (stderr, val);
  fprintf (stderr, "\n");
}

/* Computes address of the reference REF in ADDR.  The size of the accessed
   location is stored to SIZE.  Returns the ultimate containing object to
   which REF refers.  */

tree
get_inner_reference_aff (tree ref, aff_tree *addr, double_int *size)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree toff;
  enum machine_mode mode;
  int uns, vol;
  aff_tree tmp;
  tree base = get_inner_reference (ref, &bitsize, &bitpos, &toff, &mode,
				   &uns, &vol, false);
  tree base_addr = build_fold_addr_expr (base);

  /* ADDR = &BASE + TOFF + BITPOS / BITS_PER_UNIT.  */

  tree_to_aff_combination (base_addr, sizetype, addr);

  if (toff)
    {
      tree_to_aff_combination (toff, sizetype, &tmp);
      aff_combination_add (addr, &tmp);
    }

  aff_combination_const (&tmp, sizetype,
			 double_int::from_shwi (bitpos / BITS_PER_UNIT));
  aff_combination_add (addr, &tmp);

  *size = double_int::from_shwi ((bitsize + BITS_PER_UNIT - 1) / BITS_PER_UNIT);

  return base;
}

/* Returns true if a region of size SIZE1 at position 0 and a region of
   size SIZE2 at position DIFF cannot overlap.  */

bool
aff_comb_cannot_overlap_p (aff_tree *diff, double_int size1, double_int size2)
{
  double_int d, bound;

  /* Unless the difference is a constant, we fail.  */
  if (diff->n != 0)
    return false;

  d = diff->offset;
  if (d.is_negative ())
    {
      /* The second object is before the first one, we succeed if the last
	 element of the second object is before the start of the first one.  */
      bound = d + size2 + double_int_minus_one;
      return bound.is_negative ();
    }
  else
    {
      /* We succeed if the second object starts after the first one ends.  */
      return size1.sle (d);
    }
}
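
/* For instance, with SIZE1 = SIZE2 = 4 the function returns true for
   DIFF = 4 (the regions [0, 3] and [4, 7] are merely adjacent), false
   for DIFF = 3, and true again for DIFF = -4, where the second region
   ends exactly where the first one starts.  */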