/* Source provenance: official-gcc.git / gcc / tree-affine.c
   blob f93f186a761cfa84d950570281a2dcb9cf64aa14
   (snapshot circa 2013-12-06; ChangeLog entry by Paolo Carlini
   <paolo.carlini@oracle.com>).  */
/* Operations with affine combinations of trees.
   Copyright (C) 2005-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tree.h"
24 #include "expr.h"
25 #include "tree-pretty-print.h"
26 #include "pointer-set.h"
27 #include "tree-affine.h"
28 #include "basic-block.h"
29 #include "tree-ssa-alias.h"
30 #include "internal-fn.h"
31 #include "gimple-expr.h"
32 #include "is-a.h"
33 #include "gimple.h"
34 #include "gimplify.h"
35 #include "flags.h"
36 #include "dumpfile.h"
37 #include "cfgexpand.h"
39 /* Extends CST as appropriate for the affine combinations COMB. */
41 double_int
42 double_int_ext_for_comb (double_int cst, aff_tree *comb)
44 return cst.sext (TYPE_PRECISION (comb->type));
47 /* Initializes affine combination COMB so that its value is zero in TYPE. */
49 static void
50 aff_combination_zero (aff_tree *comb, tree type)
52 comb->type = type;
53 comb->offset = double_int_zero;
54 comb->n = 0;
55 comb->rest = NULL_TREE;
58 /* Sets COMB to CST. */
60 void
61 aff_combination_const (aff_tree *comb, tree type, double_int cst)
63 aff_combination_zero (comb, type);
64 comb->offset = double_int_ext_for_comb (cst, comb);
67 /* Sets COMB to single element ELT. */
69 void
70 aff_combination_elt (aff_tree *comb, tree type, tree elt)
72 aff_combination_zero (comb, type);
74 comb->n = 1;
75 comb->elts[0].val = elt;
76 comb->elts[0].coef = double_int_one;
79 /* Scales COMB by SCALE. */
81 void
82 aff_combination_scale (aff_tree *comb, double_int scale)
84 unsigned i, j;
86 scale = double_int_ext_for_comb (scale, comb);
87 if (scale.is_one ())
88 return;
90 if (scale.is_zero ())
92 aff_combination_zero (comb, comb->type);
93 return;
96 comb->offset
97 = double_int_ext_for_comb (scale * comb->offset, comb);
98 for (i = 0, j = 0; i < comb->n; i++)
100 double_int new_coef;
102 new_coef
103 = double_int_ext_for_comb (scale * comb->elts[i].coef, comb);
104 /* A coefficient may become zero due to overflow. Remove the zero
105 elements. */
106 if (new_coef.is_zero ())
107 continue;
108 comb->elts[j].coef = new_coef;
109 comb->elts[j].val = comb->elts[i].val;
110 j++;
112 comb->n = j;
114 if (comb->rest)
116 tree type = comb->type;
117 if (POINTER_TYPE_P (type))
118 type = sizetype;
119 if (comb->n < MAX_AFF_ELTS)
121 comb->elts[comb->n].coef = scale;
122 comb->elts[comb->n].val = comb->rest;
123 comb->rest = NULL_TREE;
124 comb->n++;
126 else
127 comb->rest = fold_build2 (MULT_EXPR, type, comb->rest,
128 double_int_to_tree (type, scale));
132 /* Adds ELT * SCALE to COMB. */
134 void
135 aff_combination_add_elt (aff_tree *comb, tree elt, double_int scale)
137 unsigned i;
138 tree type;
140 scale = double_int_ext_for_comb (scale, comb);
141 if (scale.is_zero ())
142 return;
144 for (i = 0; i < comb->n; i++)
145 if (operand_equal_p (comb->elts[i].val, elt, 0))
147 double_int new_coef;
149 new_coef = comb->elts[i].coef + scale;
150 new_coef = double_int_ext_for_comb (new_coef, comb);
151 if (!new_coef.is_zero ())
153 comb->elts[i].coef = new_coef;
154 return;
157 comb->n--;
158 comb->elts[i] = comb->elts[comb->n];
160 if (comb->rest)
162 gcc_assert (comb->n == MAX_AFF_ELTS - 1);
163 comb->elts[comb->n].coef = double_int_one;
164 comb->elts[comb->n].val = comb->rest;
165 comb->rest = NULL_TREE;
166 comb->n++;
168 return;
170 if (comb->n < MAX_AFF_ELTS)
172 comb->elts[comb->n].coef = scale;
173 comb->elts[comb->n].val = elt;
174 comb->n++;
175 return;
178 type = comb->type;
179 if (POINTER_TYPE_P (type))
180 type = sizetype;
182 if (scale.is_one ())
183 elt = fold_convert (type, elt);
184 else
185 elt = fold_build2 (MULT_EXPR, type,
186 fold_convert (type, elt),
187 double_int_to_tree (type, scale));
189 if (comb->rest)
190 comb->rest = fold_build2 (PLUS_EXPR, type, comb->rest,
191 elt);
192 else
193 comb->rest = elt;
196 /* Adds CST to C. */
198 static void
199 aff_combination_add_cst (aff_tree *c, double_int cst)
201 c->offset = double_int_ext_for_comb (c->offset + cst, c);
204 /* Adds COMB2 to COMB1. */
206 void
207 aff_combination_add (aff_tree *comb1, aff_tree *comb2)
209 unsigned i;
211 aff_combination_add_cst (comb1, comb2->offset);
212 for (i = 0; i < comb2->n; i++)
213 aff_combination_add_elt (comb1, comb2->elts[i].val, comb2->elts[i].coef);
214 if (comb2->rest)
215 aff_combination_add_elt (comb1, comb2->rest, double_int_one);
218 /* Converts affine combination COMB to TYPE. */
220 void
221 aff_combination_convert (aff_tree *comb, tree type)
223 unsigned i, j;
224 tree comb_type = comb->type;
226 if (TYPE_PRECISION (type) > TYPE_PRECISION (comb_type))
228 tree val = fold_convert (type, aff_combination_to_tree (comb));
229 tree_to_aff_combination (val, type, comb);
230 return;
233 comb->type = type;
234 if (comb->rest && !POINTER_TYPE_P (type))
235 comb->rest = fold_convert (type, comb->rest);
237 if (TYPE_PRECISION (type) == TYPE_PRECISION (comb_type))
238 return;
240 comb->offset = double_int_ext_for_comb (comb->offset, comb);
241 for (i = j = 0; i < comb->n; i++)
243 double_int new_coef = double_int_ext_for_comb (comb->elts[i].coef, comb);
244 if (new_coef.is_zero ())
245 continue;
246 comb->elts[j].coef = new_coef;
247 comb->elts[j].val = fold_convert (type, comb->elts[i].val);
248 j++;
251 comb->n = j;
252 if (comb->n < MAX_AFF_ELTS && comb->rest)
254 comb->elts[comb->n].coef = double_int_one;
255 comb->elts[comb->n].val = comb->rest;
256 comb->rest = NULL_TREE;
257 comb->n++;
261 /* Splits EXPR into an affine combination of parts. */
263 void
264 tree_to_aff_combination (tree expr, tree type, aff_tree *comb)
266 aff_tree tmp;
267 enum tree_code code;
268 tree cst, core, toffset;
269 HOST_WIDE_INT bitpos, bitsize;
270 enum machine_mode mode;
271 int unsignedp, volatilep;
273 STRIP_NOPS (expr);
275 code = TREE_CODE (expr);
276 switch (code)
278 case INTEGER_CST:
279 aff_combination_const (comb, type, tree_to_double_int (expr));
280 return;
282 case POINTER_PLUS_EXPR:
283 tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
284 tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
285 aff_combination_add (comb, &tmp);
286 return;
288 case PLUS_EXPR:
289 case MINUS_EXPR:
290 tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
291 tree_to_aff_combination (TREE_OPERAND (expr, 1), type, &tmp);
292 if (code == MINUS_EXPR)
293 aff_combination_scale (&tmp, double_int_minus_one);
294 aff_combination_add (comb, &tmp);
295 return;
297 case MULT_EXPR:
298 cst = TREE_OPERAND (expr, 1);
299 if (TREE_CODE (cst) != INTEGER_CST)
300 break;
301 tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
302 aff_combination_scale (comb, tree_to_double_int (cst));
303 return;
305 case NEGATE_EXPR:
306 tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
307 aff_combination_scale (comb, double_int_minus_one);
308 return;
310 case BIT_NOT_EXPR:
311 /* ~x = -x - 1 */
312 tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
313 aff_combination_scale (comb, double_int_minus_one);
314 aff_combination_add_cst (comb, double_int_minus_one);
315 return;
317 case ADDR_EXPR:
318 /* Handle &MEM[ptr + CST] which is equivalent to POINTER_PLUS_EXPR. */
319 if (TREE_CODE (TREE_OPERAND (expr, 0)) == MEM_REF)
321 expr = TREE_OPERAND (expr, 0);
322 tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
323 tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
324 aff_combination_add (comb, &tmp);
325 return;
327 core = get_inner_reference (TREE_OPERAND (expr, 0), &bitsize, &bitpos,
328 &toffset, &mode, &unsignedp, &volatilep,
329 false);
330 if (bitpos % BITS_PER_UNIT != 0)
331 break;
332 aff_combination_const (comb, type,
333 double_int::from_uhwi (bitpos / BITS_PER_UNIT));
334 core = build_fold_addr_expr (core);
335 if (TREE_CODE (core) == ADDR_EXPR)
336 aff_combination_add_elt (comb, core, double_int_one);
337 else
339 tree_to_aff_combination (core, type, &tmp);
340 aff_combination_add (comb, &tmp);
342 if (toffset)
344 tree_to_aff_combination (toffset, type, &tmp);
345 aff_combination_add (comb, &tmp);
347 return;
349 case MEM_REF:
350 if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR)
351 tree_to_aff_combination (TREE_OPERAND (TREE_OPERAND (expr, 0), 0),
352 type, comb);
353 else if (integer_zerop (TREE_OPERAND (expr, 1)))
355 aff_combination_elt (comb, type, expr);
356 return;
358 else
359 aff_combination_elt (comb, type,
360 build2 (MEM_REF, TREE_TYPE (expr),
361 TREE_OPERAND (expr, 0),
362 build_int_cst
363 (TREE_TYPE (TREE_OPERAND (expr, 1)), 0)));
364 tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
365 aff_combination_add (comb, &tmp);
366 return;
368 default:
369 break;
372 aff_combination_elt (comb, type, expr);
375 /* Creates EXPR + ELT * SCALE in TYPE. EXPR is taken from affine
376 combination COMB. */
378 static tree
379 add_elt_to_tree (tree expr, tree type, tree elt, double_int scale,
380 aff_tree *comb)
382 enum tree_code code;
383 tree type1 = type;
384 if (POINTER_TYPE_P (type))
385 type1 = sizetype;
387 scale = double_int_ext_for_comb (scale, comb);
389 if (scale.is_minus_one ()
390 && POINTER_TYPE_P (TREE_TYPE (elt)))
392 elt = convert_to_ptrofftype (elt);
393 elt = fold_build1 (NEGATE_EXPR, TREE_TYPE (elt), elt);
394 scale = double_int_one;
397 if (scale.is_one ())
399 if (!expr)
401 if (POINTER_TYPE_P (TREE_TYPE (elt)))
402 return elt;
403 else
404 return fold_convert (type1, elt);
407 if (POINTER_TYPE_P (TREE_TYPE (expr)))
408 return fold_build_pointer_plus (expr, elt);
409 if (POINTER_TYPE_P (TREE_TYPE (elt)))
410 return fold_build_pointer_plus (elt, expr);
411 return fold_build2 (PLUS_EXPR, type1,
412 expr, fold_convert (type1, elt));
415 if (scale.is_minus_one ())
417 if (!expr)
418 return fold_build1 (NEGATE_EXPR, type1,
419 fold_convert (type1, elt));
421 if (POINTER_TYPE_P (TREE_TYPE (expr)))
423 elt = convert_to_ptrofftype (elt);
424 elt = fold_build1 (NEGATE_EXPR, TREE_TYPE (elt), elt);
425 return fold_build_pointer_plus (expr, elt);
427 return fold_build2 (MINUS_EXPR, type1,
428 expr, fold_convert (type1, elt));
431 elt = fold_convert (type1, elt);
432 if (!expr)
433 return fold_build2 (MULT_EXPR, type1, elt,
434 double_int_to_tree (type1, scale));
436 if (scale.is_negative ())
438 code = MINUS_EXPR;
439 scale = -scale;
441 else
442 code = PLUS_EXPR;
444 elt = fold_build2 (MULT_EXPR, type1, elt,
445 double_int_to_tree (type1, scale));
446 if (POINTER_TYPE_P (TREE_TYPE (expr)))
448 if (code == MINUS_EXPR)
449 elt = fold_build1 (NEGATE_EXPR, type1, elt);
450 return fold_build_pointer_plus (expr, elt);
452 return fold_build2 (code, type1, expr, elt);
455 /* Makes tree from the affine combination COMB. */
457 tree
458 aff_combination_to_tree (aff_tree *comb)
460 tree type = comb->type;
461 tree expr = NULL_TREE;
462 unsigned i;
463 double_int off, sgn;
464 tree type1 = type;
465 if (POINTER_TYPE_P (type))
466 type1 = sizetype;
468 gcc_assert (comb->n == MAX_AFF_ELTS || comb->rest == NULL_TREE);
470 for (i = 0; i < comb->n; i++)
471 expr = add_elt_to_tree (expr, type, comb->elts[i].val, comb->elts[i].coef,
472 comb);
474 if (comb->rest)
475 expr = add_elt_to_tree (expr, type, comb->rest, double_int_one, comb);
477 /* Ensure that we get x - 1, not x + (-1) or x + 0xff..f if x is
478 unsigned. */
479 if (comb->offset.is_negative ())
481 off = -comb->offset;
482 sgn = double_int_minus_one;
484 else
486 off = comb->offset;
487 sgn = double_int_one;
489 return add_elt_to_tree (expr, type, double_int_to_tree (type1, off), sgn,
490 comb);
493 /* Copies the tree elements of COMB to ensure that they are not shared. */
495 void
496 unshare_aff_combination (aff_tree *comb)
498 unsigned i;
500 for (i = 0; i < comb->n; i++)
501 comb->elts[i].val = unshare_expr (comb->elts[i].val);
502 if (comb->rest)
503 comb->rest = unshare_expr (comb->rest);
506 /* Remove M-th element from COMB. */
508 void
509 aff_combination_remove_elt (aff_tree *comb, unsigned m)
511 comb->n--;
512 if (m <= comb->n)
513 comb->elts[m] = comb->elts[comb->n];
514 if (comb->rest)
516 comb->elts[comb->n].coef = double_int_one;
517 comb->elts[comb->n].val = comb->rest;
518 comb->rest = NULL_TREE;
519 comb->n++;
523 /* Adds C * COEF * VAL to R. VAL may be NULL, in that case only
524 C * COEF is added to R. */
527 static void
528 aff_combination_add_product (aff_tree *c, double_int coef, tree val,
529 aff_tree *r)
531 unsigned i;
532 tree aval, type;
534 for (i = 0; i < c->n; i++)
536 aval = c->elts[i].val;
537 if (val)
539 type = TREE_TYPE (aval);
540 aval = fold_build2 (MULT_EXPR, type, aval,
541 fold_convert (type, val));
544 aff_combination_add_elt (r, aval, coef * c->elts[i].coef);
547 if (c->rest)
549 aval = c->rest;
550 if (val)
552 type = TREE_TYPE (aval);
553 aval = fold_build2 (MULT_EXPR, type, aval,
554 fold_convert (type, val));
557 aff_combination_add_elt (r, aval, coef);
560 if (val)
561 aff_combination_add_elt (r, val, coef * c->offset);
562 else
563 aff_combination_add_cst (r, coef * c->offset);
566 /* Multiplies C1 by C2, storing the result to R */
568 void
569 aff_combination_mult (aff_tree *c1, aff_tree *c2, aff_tree *r)
571 unsigned i;
572 gcc_assert (TYPE_PRECISION (c1->type) == TYPE_PRECISION (c2->type));
574 aff_combination_zero (r, c1->type);
576 for (i = 0; i < c2->n; i++)
577 aff_combination_add_product (c1, c2->elts[i].coef, c2->elts[i].val, r);
578 if (c2->rest)
579 aff_combination_add_product (c1, double_int_one, c2->rest, r);
580 aff_combination_add_product (c1, c2->offset, NULL, r);
583 /* Returns the element of COMB whose value is VAL, or NULL if no such
584 element exists. If IDX is not NULL, it is set to the index of VAL in
585 COMB. */
587 static struct aff_comb_elt *
588 aff_combination_find_elt (aff_tree *comb, tree val, unsigned *idx)
590 unsigned i;
592 for (i = 0; i < comb->n; i++)
593 if (operand_equal_p (comb->elts[i].val, val, 0))
595 if (idx)
596 *idx = i;
598 return &comb->elts[i];
601 return NULL;
604 /* Element of the cache that maps ssa name NAME to its expanded form
605 as an affine expression EXPANSION. */
607 struct name_expansion
609 aff_tree expansion;
611 /* True if the expansion for the name is just being generated. */
612 unsigned in_progress : 1;
615 /* Expands SSA names in COMB recursively. CACHE is used to cache the
616 results. */
618 void
619 aff_combination_expand (aff_tree *comb ATTRIBUTE_UNUSED,
620 struct pointer_map_t **cache ATTRIBUTE_UNUSED)
622 unsigned i;
623 aff_tree to_add, current, curre;
624 tree e, rhs;
625 gimple def;
626 double_int scale;
627 void **slot;
628 struct name_expansion *exp;
630 aff_combination_zero (&to_add, comb->type);
631 for (i = 0; i < comb->n; i++)
633 tree type, name;
634 enum tree_code code;
636 e = comb->elts[i].val;
637 type = TREE_TYPE (e);
638 name = e;
639 /* Look through some conversions. */
640 if (TREE_CODE (e) == NOP_EXPR
641 && (TYPE_PRECISION (type)
642 >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (e, 0)))))
643 name = TREE_OPERAND (e, 0);
644 if (TREE_CODE (name) != SSA_NAME)
645 continue;
646 def = SSA_NAME_DEF_STMT (name);
647 if (!is_gimple_assign (def) || gimple_assign_lhs (def) != name)
648 continue;
650 code = gimple_assign_rhs_code (def);
651 if (code != SSA_NAME
652 && !IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
653 && (get_gimple_rhs_class (code) != GIMPLE_SINGLE_RHS
654 || !is_gimple_min_invariant (gimple_assign_rhs1 (def))))
655 continue;
657 /* We do not know whether the reference retains its value at the
658 place where the expansion is used. */
659 if (TREE_CODE_CLASS (code) == tcc_reference)
660 continue;
662 if (!*cache)
663 *cache = pointer_map_create ();
664 slot = pointer_map_insert (*cache, e);
665 exp = (struct name_expansion *) *slot;
667 if (!exp)
669 exp = XNEW (struct name_expansion);
670 exp->in_progress = 1;
671 *slot = exp;
672 /* In principle this is a generally valid folding, but
673 it is not unconditionally an optimization, so do it
674 here and not in fold_unary. */
675 /* Convert (T1)(X *+- CST) into (T1)X *+- (T1)CST if T1 is wider
676 than the type of X and overflow for the type of X is
677 undefined. */
678 if (e != name
679 && INTEGRAL_TYPE_P (type)
680 && INTEGRAL_TYPE_P (TREE_TYPE (name))
681 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (name))
682 && TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (name))
683 && (code == PLUS_EXPR || code == MINUS_EXPR || code == MULT_EXPR)
684 && TREE_CODE (gimple_assign_rhs2 (def)) == INTEGER_CST)
685 rhs = fold_build2 (code, type,
686 fold_convert (type, gimple_assign_rhs1 (def)),
687 fold_convert (type, gimple_assign_rhs2 (def)));
688 else
690 rhs = gimple_assign_rhs_to_tree (def);
691 if (e != name)
692 rhs = fold_convert (type, rhs);
694 tree_to_aff_combination_expand (rhs, comb->type, &current, cache);
695 exp->expansion = current;
696 exp->in_progress = 0;
698 else
700 /* Since we follow the definitions in the SSA form, we should not
701 enter a cycle unless we pass through a phi node. */
702 gcc_assert (!exp->in_progress);
703 current = exp->expansion;
706 /* Accumulate the new terms to TO_ADD, so that we do not modify
707 COMB while traversing it; include the term -coef * E, to remove
708 it from COMB. */
709 scale = comb->elts[i].coef;
710 aff_combination_zero (&curre, comb->type);
711 aff_combination_add_elt (&curre, e, -scale);
712 aff_combination_scale (&current, scale);
713 aff_combination_add (&to_add, &current);
714 aff_combination_add (&to_add, &curre);
716 aff_combination_add (comb, &to_add);
719 /* Similar to tree_to_aff_combination, but follows SSA name definitions
720 and expands them recursively. CACHE is used to cache the expansions
721 of the ssa names, to avoid exponential time complexity for cases
722 like
724 a1 = a0 + a0;
725 a2 = a1 + a1;
726 a3 = a2 + a2;
727 ... */
729 void
730 tree_to_aff_combination_expand (tree expr, tree type, aff_tree *comb,
731 struct pointer_map_t **cache)
733 tree_to_aff_combination (expr, type, comb);
734 aff_combination_expand (comb, cache);
737 /* Frees memory occupied by struct name_expansion in *VALUE. Callback for
738 pointer_map_traverse. */
740 static bool
741 free_name_expansion (const void *key ATTRIBUTE_UNUSED, void **value,
742 void *data ATTRIBUTE_UNUSED)
744 struct name_expansion *const exp = (struct name_expansion *) *value;
746 free (exp);
747 return true;
750 /* Frees memory allocated for the CACHE used by
751 tree_to_aff_combination_expand. */
753 void
754 free_affine_expand_cache (struct pointer_map_t **cache)
756 if (!*cache)
757 return;
759 pointer_map_traverse (*cache, free_name_expansion, NULL);
760 pointer_map_destroy (*cache);
761 *cache = NULL;
764 /* If VAL != CST * DIV for any constant CST, returns false.
765 Otherwise, if *MULT_SET is true, additionally compares CST and MULT,
766 and if they are different, returns false. Finally, if neither of these
767 two cases occur, true is returned, and CST is stored to MULT and MULT_SET
768 is set to true. */
770 static bool
771 double_int_constant_multiple_p (double_int val, double_int div,
772 bool *mult_set, double_int *mult)
774 double_int rem, cst;
776 if (val.is_zero ())
778 if (*mult_set && !mult->is_zero ())
779 return false;
780 *mult_set = true;
781 *mult = double_int_zero;
782 return true;
785 if (div.is_zero ())
786 return false;
788 cst = val.sdivmod (div, FLOOR_DIV_EXPR, &rem);
789 if (!rem.is_zero ())
790 return false;
792 if (*mult_set && *mult != cst)
793 return false;
795 *mult_set = true;
796 *mult = cst;
797 return true;
800 /* Returns true if VAL = X * DIV for some constant X. If this is the case,
801 X is stored to MULT. */
803 bool
804 aff_combination_constant_multiple_p (aff_tree *val, aff_tree *div,
805 double_int *mult)
807 bool mult_set = false;
808 unsigned i;
810 if (val->n == 0 && val->offset.is_zero ())
812 *mult = double_int_zero;
813 return true;
815 if (val->n != div->n)
816 return false;
818 if (val->rest || div->rest)
819 return false;
821 if (!double_int_constant_multiple_p (val->offset, div->offset,
822 &mult_set, mult))
823 return false;
825 for (i = 0; i < div->n; i++)
827 struct aff_comb_elt *elt
828 = aff_combination_find_elt (val, div->elts[i].val, NULL);
829 if (!elt)
830 return false;
831 if (!double_int_constant_multiple_p (elt->coef, div->elts[i].coef,
832 &mult_set, mult))
833 return false;
836 gcc_assert (mult_set);
837 return true;
840 /* Prints the affine VAL to the FILE. */
842 static void
843 print_aff (FILE *file, aff_tree *val)
845 unsigned i;
846 bool uns = TYPE_UNSIGNED (val->type);
847 if (POINTER_TYPE_P (val->type))
848 uns = false;
849 fprintf (file, "{\n type = ");
850 print_generic_expr (file, val->type, TDF_VOPS|TDF_MEMSYMS);
851 fprintf (file, "\n offset = ");
852 dump_double_int (file, val->offset, uns);
853 if (val->n > 0)
855 fprintf (file, "\n elements = {\n");
856 for (i = 0; i < val->n; i++)
858 fprintf (file, " [%d] = ", i);
859 print_generic_expr (file, val->elts[i].val, TDF_VOPS|TDF_MEMSYMS);
861 fprintf (file, " * ");
862 dump_double_int (file, val->elts[i].coef, uns);
863 if (i != val->n - 1)
864 fprintf (file, ", \n");
866 fprintf (file, "\n }");
868 if (val->rest)
870 fprintf (file, "\n rest = ");
871 print_generic_expr (file, val->rest, TDF_VOPS|TDF_MEMSYMS);
873 fprintf (file, "\n}");
876 /* Prints the affine VAL to the standard error, used for debugging. */
878 DEBUG_FUNCTION void
879 debug_aff (aff_tree *val)
881 print_aff (stderr, val);
882 fprintf (stderr, "\n");
885 /* Computes address of the reference REF in ADDR. The size of the accessed
886 location is stored to SIZE. Returns the ultimate containing object to
887 which REF refers. */
889 tree
890 get_inner_reference_aff (tree ref, aff_tree *addr, double_int *size)
892 HOST_WIDE_INT bitsize, bitpos;
893 tree toff;
894 enum machine_mode mode;
895 int uns, vol;
896 aff_tree tmp;
897 tree base = get_inner_reference (ref, &bitsize, &bitpos, &toff, &mode,
898 &uns, &vol, false);
899 tree base_addr = build_fold_addr_expr (base);
901 /* ADDR = &BASE + TOFF + BITPOS / BITS_PER_UNIT. */
903 tree_to_aff_combination (base_addr, sizetype, addr);
905 if (toff)
907 tree_to_aff_combination (toff, sizetype, &tmp);
908 aff_combination_add (addr, &tmp);
911 aff_combination_const (&tmp, sizetype,
912 double_int::from_shwi (bitpos / BITS_PER_UNIT));
913 aff_combination_add (addr, &tmp);
915 *size = double_int::from_shwi ((bitsize + BITS_PER_UNIT - 1) / BITS_PER_UNIT);
917 return base;
920 /* Returns true if a region of size SIZE1 at position 0 and a region of
921 size SIZE2 at position DIFF cannot overlap. */
923 bool
924 aff_comb_cannot_overlap_p (aff_tree *diff, double_int size1, double_int size2)
926 double_int d, bound;
928 /* Unless the difference is a constant, we fail. */
929 if (diff->n != 0)
930 return false;
932 d = diff->offset;
933 if (d.is_negative ())
935 /* The second object is before the first one, we succeed if the last
936 element of the second object is before the start of the first one. */
937 bound = d + size2 + double_int_minus_one;
938 return bound.is_negative ();
940 else
942 /* We succeed if the second object starts after the first one ends. */
943 return size1.sle (d);