PR sanitizer/58413
[official-gcc.git] / gcc / tree-affine.c
/* Operations with affine combinations of trees.
   Copyright (C) 2005-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "tree-pretty-print.h"
#include "pointer-set.h"
#include "tree-affine.h"
#include "gimple.h"
#include "flags.h"
#include "dumpfile.h"

/* Extends CST as appropriate for the affine combination COMB.  */

double_int
double_int_ext_for_comb (double_int cst, aff_tree *comb)
{
  return cst.sext (TYPE_PRECISION (comb->type));
}

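/* Example (illustrative sketch): for a combination over a 32-bit type,
   the constant 0xffffffff sign-extends to -1, so offsets and coefficients
   are always kept in a canonical signed form modulo 2^precision.  */
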
/* Initializes affine combination COMB so that its value is zero in TYPE.  */

static void
aff_combination_zero (aff_tree *comb, tree type)
{
  comb->type = type;
  comb->offset = double_int_zero;
  comb->n = 0;
  comb->rest = NULL_TREE;
}

/* Sets COMB to CST.  */

void
aff_combination_const (aff_tree *comb, tree type, double_int cst)
{
  aff_combination_zero (comb, type);
  comb->offset = double_int_ext_for_comb (cst, comb);
}

/* Sets COMB to single element ELT.  */

void
aff_combination_elt (aff_tree *comb, tree type, tree elt)
{
  aff_combination_zero (comb, type);

  comb->n = 1;
  comb->elts[0].val = elt;
  comb->elts[0].coef = double_int_one;
}

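/* Example (illustrative sketch, with a hypothetical tree X): after
   aff_combination_elt (&comb, type, x), the combination denotes 1 * x,
   i.e. offset = 0, n = 1, elts[0] = { x, 1 }, rest = NULL_TREE.  */
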
/* Scales COMB by SCALE.  */

void
aff_combination_scale (aff_tree *comb, double_int scale)
{
  unsigned i, j;

  scale = double_int_ext_for_comb (scale, comb);
  if (scale.is_one ())
    return;

  if (scale.is_zero ())
    {
      aff_combination_zero (comb, comb->type);
      return;
    }

  comb->offset
    = double_int_ext_for_comb (scale * comb->offset, comb);
  for (i = 0, j = 0; i < comb->n; i++)
    {
      double_int new_coef;

      new_coef
	= double_int_ext_for_comb (scale * comb->elts[i].coef, comb);
      /* A coefficient may become zero due to overflow.  Remove the zero
	 elements.  */
      if (new_coef.is_zero ())
	continue;
      comb->elts[j].coef = new_coef;
      comb->elts[j].val = comb->elts[i].val;
      j++;
    }
  comb->n = j;

  if (comb->rest)
    {
      tree type = comb->type;
      if (POINTER_TYPE_P (type))
	type = sizetype;
      if (comb->n < MAX_AFF_ELTS)
	{
	  comb->elts[comb->n].coef = scale;
	  comb->elts[comb->n].val = comb->rest;
	  comb->rest = NULL_TREE;
	  comb->n++;
	}
      else
	comb->rest = fold_build2 (MULT_EXPR, type, comb->rest,
				  double_int_to_tree (type, scale));
    }
}

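/* Example (illustrative sketch): scaling the combination 2 * x + 3 by 4
   yields 8 * x + 12.  All arithmetic happens modulo 2^precision of
   COMB->TYPE, so a coefficient can wrap around to zero, in which case
   its element is dropped; a pending REST term either becomes a scaled
   element or is multiplied in place.  */
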
/* Adds ELT * SCALE to COMB.  */

void
aff_combination_add_elt (aff_tree *comb, tree elt, double_int scale)
{
  unsigned i;
  tree type;

  scale = double_int_ext_for_comb (scale, comb);
  if (scale.is_zero ())
    return;

  for (i = 0; i < comb->n; i++)
    if (operand_equal_p (comb->elts[i].val, elt, 0))
      {
	double_int new_coef;

	new_coef = comb->elts[i].coef + scale;
	new_coef = double_int_ext_for_comb (new_coef, comb);
	if (!new_coef.is_zero ())
	  {
	    comb->elts[i].coef = new_coef;
	    return;
	  }

	comb->n--;
	comb->elts[i] = comb->elts[comb->n];

	if (comb->rest)
	  {
	    gcc_assert (comb->n == MAX_AFF_ELTS - 1);
	    comb->elts[comb->n].coef = double_int_one;
	    comb->elts[comb->n].val = comb->rest;
	    comb->rest = NULL_TREE;
	    comb->n++;
	  }
	return;
      }
  if (comb->n < MAX_AFF_ELTS)
    {
      comb->elts[comb->n].coef = scale;
      comb->elts[comb->n].val = elt;
      comb->n++;
      return;
    }

  type = comb->type;
  if (POINTER_TYPE_P (type))
    type = sizetype;

  if (scale.is_one ())
    elt = fold_convert (type, elt);
  else
    elt = fold_build2 (MULT_EXPR, type,
		       fold_convert (type, elt),
		       double_int_to_tree (type, scale));

  if (comb->rest)
    comb->rest = fold_build2 (PLUS_EXPR, type, comb->rest,
			      elt);
  else
    comb->rest = elt;
}

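/* Example (illustrative sketch, with a hypothetical tree X): adding
   X * 2 to a combination already containing X * 3 merges the two into
   X * 5; if the coefficients cancel, the element is removed entirely.
   A new element arriving when N == MAX_AFF_ELTS is accumulated into
   the REST tree instead.  */
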
/* Adds CST to C.  */

static void
aff_combination_add_cst (aff_tree *c, double_int cst)
{
  c->offset = double_int_ext_for_comb (c->offset + cst, c);
}

/* Adds COMB2 to COMB1.  */

void
aff_combination_add (aff_tree *comb1, aff_tree *comb2)
{
  unsigned i;

  aff_combination_add_cst (comb1, comb2->offset);
  for (i = 0; i < comb2->n; i++)
    aff_combination_add_elt (comb1, comb2->elts[i].val, comb2->elts[i].coef);
  if (comb2->rest)
    aff_combination_add_elt (comb1, comb2->rest, double_int_one);
}

/* Converts affine combination COMB to TYPE.  */

void
aff_combination_convert (aff_tree *comb, tree type)
{
  unsigned i, j;
  tree comb_type = comb->type;

  if (TYPE_PRECISION (type) > TYPE_PRECISION (comb_type))
    {
      tree val = fold_convert (type, aff_combination_to_tree (comb));
      tree_to_aff_combination (val, type, comb);
      return;
    }

  comb->type = type;
  if (comb->rest && !POINTER_TYPE_P (type))
    comb->rest = fold_convert (type, comb->rest);

  if (TYPE_PRECISION (type) == TYPE_PRECISION (comb_type))
    return;

  comb->offset = double_int_ext_for_comb (comb->offset, comb);
  for (i = j = 0; i < comb->n; i++)
    {
      double_int new_coef = double_int_ext_for_comb (comb->elts[i].coef, comb);
      if (new_coef.is_zero ())
	continue;
      comb->elts[j].coef = new_coef;
      comb->elts[j].val = fold_convert (type, comb->elts[i].val);
      j++;
    }

  comb->n = j;
  if (comb->n < MAX_AFF_ELTS && comb->rest)
    {
      comb->elts[comb->n].coef = double_int_one;
      comb->elts[comb->n].val = comb->rest;
      comb->rest = NULL_TREE;
      comb->n++;
    }
}

/* Splits EXPR into an affine combination of parts.  */

void
tree_to_aff_combination (tree expr, tree type, aff_tree *comb)
{
  aff_tree tmp;
  enum tree_code code;
  tree cst, core, toffset;
  HOST_WIDE_INT bitpos, bitsize;
  enum machine_mode mode;
  int unsignedp, volatilep;

  STRIP_NOPS (expr);

  code = TREE_CODE (expr);
  switch (code)
    {
    case INTEGER_CST:
      aff_combination_const (comb, type, tree_to_double_int (expr));
      return;

    case POINTER_PLUS_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
      aff_combination_add (comb, &tmp);
      return;

    case PLUS_EXPR:
    case MINUS_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), type, &tmp);
      if (code == MINUS_EXPR)
	aff_combination_scale (&tmp, double_int_minus_one);
      aff_combination_add (comb, &tmp);
      return;

    case MULT_EXPR:
      cst = TREE_OPERAND (expr, 1);
      if (TREE_CODE (cst) != INTEGER_CST)
	break;
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, tree_to_double_int (cst));
      return;

    case NEGATE_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, double_int_minus_one);
      return;

    case BIT_NOT_EXPR:
      /* ~x = -x - 1 */
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, double_int_minus_one);
      aff_combination_add_cst (comb, double_int_minus_one);
      return;

    case ADDR_EXPR:
      /* Handle &MEM[ptr + CST] which is equivalent to POINTER_PLUS_EXPR.  */
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == MEM_REF)
	{
	  expr = TREE_OPERAND (expr, 0);
	  tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
	  tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
	  aff_combination_add (comb, &tmp);
	  return;
	}
      core = get_inner_reference (TREE_OPERAND (expr, 0), &bitsize, &bitpos,
				  &toffset, &mode, &unsignedp, &volatilep,
				  false);
      if (bitpos % BITS_PER_UNIT != 0)
	break;
      aff_combination_const (comb, type,
			     double_int::from_uhwi (bitpos / BITS_PER_UNIT));
      core = build_fold_addr_expr (core);
      if (TREE_CODE (core) == ADDR_EXPR)
	aff_combination_add_elt (comb, core, double_int_one);
      else
	{
	  tree_to_aff_combination (core, type, &tmp);
	  aff_combination_add (comb, &tmp);
	}
      if (toffset)
	{
	  tree_to_aff_combination (toffset, type, &tmp);
	  aff_combination_add (comb, &tmp);
	}
      return;

    case MEM_REF:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR)
	tree_to_aff_combination (TREE_OPERAND (TREE_OPERAND (expr, 0), 0),
				 type, comb);
      else if (integer_zerop (TREE_OPERAND (expr, 1)))
	{
	  aff_combination_elt (comb, type, expr);
	  return;
	}
      else
	aff_combination_elt (comb, type,
			     build2 (MEM_REF, TREE_TYPE (expr),
				     TREE_OPERAND (expr, 0),
				     build_int_cst
				       (TREE_TYPE (TREE_OPERAND (expr, 1)), 0)));
      tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
      aff_combination_add (comb, &tmp);
      return;

    default:
      break;
    }

  aff_combination_elt (comb, type, expr);
}

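/* Example (illustrative sketch, with hypothetical trees X and Y): for
   EXPR = x + 3 * y - 2 and a 32-bit TYPE, the resulting combination is

     offset = -2, n = 2, elts = { { x, 1 }, { y, 3 } }, rest = NULL_TREE.

   A subexpression that cannot be decomposed further (e.g. x * y) ends
   up as a single element with coefficient one.  */
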
/* Creates EXPR + ELT * SCALE in TYPE.  EXPR is taken from affine
   combination COMB.  */

static tree
add_elt_to_tree (tree expr, tree type, tree elt, double_int scale,
		 aff_tree *comb)
{
  enum tree_code code;
  tree type1 = type;
  if (POINTER_TYPE_P (type))
    type1 = sizetype;

  scale = double_int_ext_for_comb (scale, comb);

  if (scale.is_minus_one ()
      && POINTER_TYPE_P (TREE_TYPE (elt)))
    {
      elt = convert_to_ptrofftype (elt);
      elt = fold_build1 (NEGATE_EXPR, TREE_TYPE (elt), elt);
      scale = double_int_one;
    }

  if (scale.is_one ())
    {
      if (!expr)
	{
	  if (POINTER_TYPE_P (TREE_TYPE (elt)))
	    return elt;
	  else
	    return fold_convert (type1, elt);
	}

      if (POINTER_TYPE_P (TREE_TYPE (expr)))
	return fold_build_pointer_plus (expr, elt);
      if (POINTER_TYPE_P (TREE_TYPE (elt)))
	return fold_build_pointer_plus (elt, expr);
      return fold_build2 (PLUS_EXPR, type1,
			  expr, fold_convert (type1, elt));
    }

  if (scale.is_minus_one ())
    {
      if (!expr)
	return fold_build1 (NEGATE_EXPR, type1,
			    fold_convert (type1, elt));

      if (POINTER_TYPE_P (TREE_TYPE (expr)))
	{
	  elt = convert_to_ptrofftype (elt);
	  elt = fold_build1 (NEGATE_EXPR, TREE_TYPE (elt), elt);
	  return fold_build_pointer_plus (expr, elt);
	}
      return fold_build2 (MINUS_EXPR, type1,
			  expr, fold_convert (type1, elt));
    }

  elt = fold_convert (type1, elt);
  if (!expr)
    return fold_build2 (MULT_EXPR, type1, elt,
			double_int_to_tree (type1, scale));

  if (scale.is_negative ())
    {
      code = MINUS_EXPR;
      scale = -scale;
    }
  else
    code = PLUS_EXPR;

  elt = fold_build2 (MULT_EXPR, type1, elt,
		     double_int_to_tree (type1, scale));
  if (POINTER_TYPE_P (TREE_TYPE (expr)))
    {
      if (code == MINUS_EXPR)
	elt = fold_build1 (NEGATE_EXPR, type1, elt);
      return fold_build_pointer_plus (expr, elt);
    }
  return fold_build2 (code, type1, expr, elt);
}

/* Makes tree from the affine combination COMB.  */

tree
aff_combination_to_tree (aff_tree *comb)
{
  tree type = comb->type;
  tree expr = NULL_TREE;
  unsigned i;
  double_int off, sgn;
  tree type1 = type;
  if (POINTER_TYPE_P (type))
    type1 = sizetype;

  gcc_assert (comb->n == MAX_AFF_ELTS || comb->rest == NULL_TREE);

  for (i = 0; i < comb->n; i++)
    expr = add_elt_to_tree (expr, type, comb->elts[i].val, comb->elts[i].coef,
			    comb);

  if (comb->rest)
    expr = add_elt_to_tree (expr, type, comb->rest, double_int_one, comb);

  /* Ensure that we get x - 1, not x + (-1) or x + 0xff..f if x is
     unsigned.  */
  if (comb->offset.is_negative ())
    {
      off = -comb->offset;
      sgn = double_int_minus_one;
    }
  else
    {
      off = comb->offset;
      sgn = double_int_one;
    }
  return add_elt_to_tree (expr, type, double_int_to_tree (type1, off), sgn,
			  comb);
}

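/* Example (illustrative sketch, with a hypothetical tree X): converting
   { offset = -1, elts = { { x, 1 } } } back to a tree yields x - 1
   rather than x + 0xffffffff, thanks to the sign handling above.  */
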
/* Copies the tree elements of COMB to ensure that they are not shared.  */

void
unshare_aff_combination (aff_tree *comb)
{
  unsigned i;

  for (i = 0; i < comb->n; i++)
    comb->elts[i].val = unshare_expr (comb->elts[i].val);
  if (comb->rest)
    comb->rest = unshare_expr (comb->rest);
}

/* Removes the M-th element from COMB.  */

void
aff_combination_remove_elt (aff_tree *comb, unsigned m)
{
  comb->n--;
  if (m <= comb->n)
    comb->elts[m] = comb->elts[comb->n];
  if (comb->rest)
    {
      comb->elts[comb->n].coef = double_int_one;
      comb->elts[comb->n].val = comb->rest;
      comb->rest = NULL_TREE;
      comb->n++;
    }
}

/* Adds C * COEF * VAL to R.  VAL may be NULL, in that case only
   C * COEF is added to R.  */

static void
aff_combination_add_product (aff_tree *c, double_int coef, tree val,
			     aff_tree *r)
{
  unsigned i;
  tree aval, type;

  for (i = 0; i < c->n; i++)
    {
      aval = c->elts[i].val;
      if (val)
	{
	  type = TREE_TYPE (aval);
	  aval = fold_build2 (MULT_EXPR, type, aval,
			      fold_convert (type, val));
	}

      aff_combination_add_elt (r, aval, coef * c->elts[i].coef);
    }

  if (c->rest)
    {
      aval = c->rest;
      if (val)
	{
	  type = TREE_TYPE (aval);
	  aval = fold_build2 (MULT_EXPR, type, aval,
			      fold_convert (type, val));
	}

      aff_combination_add_elt (r, aval, coef);
    }

  if (val)
    aff_combination_add_elt (r, val, coef * c->offset);
  else
    aff_combination_add_cst (r, coef * c->offset);
}

/* Multiplies C1 by C2, storing the result to R.  */

void
aff_combination_mult (aff_tree *c1, aff_tree *c2, aff_tree *r)
{
  unsigned i;
  gcc_assert (TYPE_PRECISION (c1->type) == TYPE_PRECISION (c2->type));

  aff_combination_zero (r, c1->type);

  for (i = 0; i < c2->n; i++)
    aff_combination_add_product (c1, c2->elts[i].coef, c2->elts[i].val, r);
  if (c2->rest)
    aff_combination_add_product (c1, double_int_one, c2->rest, r);
  aff_combination_add_product (c1, c2->offset, NULL, r);
}

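/* Example (illustrative sketch, with hypothetical trees X and Y):
   multiplying x + 1 by y + 2 distributes into x * y + 2 * x + y + 2,
   where the x * y term becomes a single element whose value is the
   MULT_EXPR tree built by aff_combination_add_product.  */
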
/* Returns the element of COMB whose value is VAL, or NULL if no such
   element exists.  If IDX is not NULL, it is set to the index of VAL in
   COMB.  */

static struct aff_comb_elt *
aff_combination_find_elt (aff_tree *comb, tree val, unsigned *idx)
{
  unsigned i;

  for (i = 0; i < comb->n; i++)
    if (operand_equal_p (comb->elts[i].val, val, 0))
      {
	if (idx)
	  *idx = i;

	return &comb->elts[i];
      }

  return NULL;
}

/* Element of the cache that maps ssa name NAME to its expanded form
   as an affine expression EXPANSION.  */

struct name_expansion
{
  aff_tree expansion;

  /* True if the expansion for the name is just being generated.  */
  unsigned in_progress : 1;
};

/* Expands SSA names in COMB recursively.  CACHE is used to cache the
   results.  */

void
aff_combination_expand (aff_tree *comb ATTRIBUTE_UNUSED,
			struct pointer_map_t **cache ATTRIBUTE_UNUSED)
{
  unsigned i;
  aff_tree to_add, current, curre;
  tree e, rhs;
  gimple def;
  double_int scale;
  void **slot;
  struct name_expansion *exp;

  aff_combination_zero (&to_add, comb->type);
  for (i = 0; i < comb->n; i++)
    {
      tree type, name;
      enum tree_code code;

      e = comb->elts[i].val;
      type = TREE_TYPE (e);
      name = e;
      /* Look through some conversions.  */
      if (TREE_CODE (e) == NOP_EXPR
	  && (TYPE_PRECISION (type)
	      >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (e, 0)))))
	name = TREE_OPERAND (e, 0);
      if (TREE_CODE (name) != SSA_NAME)
	continue;
      def = SSA_NAME_DEF_STMT (name);
      if (!is_gimple_assign (def) || gimple_assign_lhs (def) != name)
	continue;

      code = gimple_assign_rhs_code (def);
      if (code != SSA_NAME
	  && !IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
	  && (get_gimple_rhs_class (code) != GIMPLE_SINGLE_RHS
	      || !is_gimple_min_invariant (gimple_assign_rhs1 (def))))
	continue;

      /* We do not know whether the reference retains its value at the
	 place where the expansion is used.  */
      if (TREE_CODE_CLASS (code) == tcc_reference)
	continue;

      if (!*cache)
	*cache = pointer_map_create ();
      slot = pointer_map_insert (*cache, e);
      exp = (struct name_expansion *) *slot;

      if (!exp)
	{
	  exp = XNEW (struct name_expansion);
	  exp->in_progress = 1;
	  *slot = exp;
	  /* In principle this is a generally valid folding, but
	     it is not unconditionally an optimization, so do it
	     here and not in fold_unary.  */
	  /* Convert (T1)(X *+- CST) into (T1)X *+- (T1)CST if T1 is wider
	     than the type of X and overflow for the type of X is
	     undefined.  */
	  if (e != name
	      && INTEGRAL_TYPE_P (type)
	      && INTEGRAL_TYPE_P (TREE_TYPE (name))
	      && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (name))
	      && TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (name))
	      && (code == PLUS_EXPR || code == MINUS_EXPR || code == MULT_EXPR)
	      && TREE_CODE (gimple_assign_rhs2 (def)) == INTEGER_CST)
	    rhs = fold_build2 (code, type,
			       fold_convert (type, gimple_assign_rhs1 (def)),
			       fold_convert (type, gimple_assign_rhs2 (def)));
	  else
	    {
	      rhs = gimple_assign_rhs_to_tree (def);
	      if (e != name)
		rhs = fold_convert (type, rhs);
	    }
	  tree_to_aff_combination_expand (rhs, comb->type, &current, cache);
	  exp->expansion = current;
	  exp->in_progress = 0;
	}
      else
	{
	  /* Since we follow the definitions in the SSA form, we should not
	     enter a cycle unless we pass through a phi node.  */
	  gcc_assert (!exp->in_progress);
	  current = exp->expansion;
	}

      /* Accumulate the new terms to TO_ADD, so that we do not modify
	 COMB while traversing it; include the term -coef * E, to remove
	 it from COMB.  */
      scale = comb->elts[i].coef;
      aff_combination_zero (&curre, comb->type);
      aff_combination_add_elt (&curre, e, -scale);
      aff_combination_scale (&current, scale);
      aff_combination_add (&to_add, &current);
      aff_combination_add (&to_add, &curre);
    }
  aff_combination_add (comb, &to_add);
}

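/* Example (illustrative sketch): if COMB contains the SSA name a2 and
   the IL defines a2 = a1 + a1 and a1 = a0 + a0, expansion rewrites the
   combination in terms of a0 with coefficient 4.  The cache guarantees
   each name is expanded only once, keeping such chains linear.  */
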
/* Similar to tree_to_aff_combination, but follows SSA name definitions
   and expands them recursively.  CACHE is used to cache the expansions
   of the ssa names, to avoid exponential time complexity for cases
   like

   a1 = a0 + a0;
   a2 = a1 + a1;
   a3 = a2 + a2;
   ...  */

void
tree_to_aff_combination_expand (tree expr, tree type, aff_tree *comb,
				struct pointer_map_t **cache)
{
  tree_to_aff_combination (expr, type, comb);
  aff_combination_expand (comb, cache);
}

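/* A minimal usage sketch (hypothetical caller, not part of this file):
   the caller owns the cache and must release it afterwards.

     aff_tree comb;
     struct pointer_map_t *cache = NULL;
     tree_to_aff_combination_expand (expr, type, &comb, &cache);
     ... use comb ...
     free_affine_expand_cache (&cache);  */
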
/* Frees memory occupied by struct name_expansion in *VALUE.  Callback for
   pointer_map_traverse.  */

static bool
free_name_expansion (const void *key ATTRIBUTE_UNUSED, void **value,
		     void *data ATTRIBUTE_UNUSED)
{
  struct name_expansion *const exp = (struct name_expansion *) *value;

  free (exp);
  return true;
}

/* Frees memory allocated for the CACHE used by
   tree_to_aff_combination_expand.  */

void
free_affine_expand_cache (struct pointer_map_t **cache)
{
  if (!*cache)
    return;

  pointer_map_traverse (*cache, free_name_expansion, NULL);
  pointer_map_destroy (*cache);
  *cache = NULL;
}

/* If VAL != CST * DIV for any constant CST, returns false.
   Otherwise, if *MULT_SET is true, additionally compares CST and MULT,
   and if they are different, returns false.  Finally, if neither of these
   two cases occur, true is returned, and CST is stored to MULT and MULT_SET
   is set to true.  */

static bool
double_int_constant_multiple_p (double_int val, double_int div,
				bool *mult_set, double_int *mult)
{
  double_int rem, cst;

  if (val.is_zero ())
    {
      if (*mult_set && !mult->is_zero ())
	return false;
      *mult_set = true;
      *mult = double_int_zero;
      return true;
    }

  if (div.is_zero ())
    return false;

  cst = val.sdivmod (div, FLOOR_DIV_EXPR, &rem);
  if (!rem.is_zero ())
    return false;

  if (*mult_set && *mult != cst)
    return false;

  *mult_set = true;
  *mult = cst;
  return true;
}

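/* Example (illustrative sketch): for val = 8 and div = 4 the function
   stores 2 to *MULT; a subsequent call with val = 6, div = 3 and the
   same MULT/MULT_SET pair still succeeds (6 == 2 * 3), while val = 5,
   div = 3 fails because the division leaves a remainder.  */
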
/* Returns true if VAL = X * DIV for some constant X.  If this is the case,
   X is stored to MULT.  */

bool
aff_combination_constant_multiple_p (aff_tree *val, aff_tree *div,
				     double_int *mult)
{
  bool mult_set = false;
  unsigned i;

  if (val->n == 0 && val->offset.is_zero ())
    {
      *mult = double_int_zero;
      return true;
    }
  if (val->n != div->n)
    return false;

  if (val->rest || div->rest)
    return false;

  if (!double_int_constant_multiple_p (val->offset, div->offset,
				       &mult_set, mult))
    return false;

  for (i = 0; i < div->n; i++)
    {
      struct aff_comb_elt *elt
	= aff_combination_find_elt (val, div->elts[i].val, NULL);
      if (!elt)
	return false;
      if (!double_int_constant_multiple_p (elt->coef, div->elts[i].coef,
					   &mult_set, mult))
	return false;
    }

  gcc_assert (mult_set);
  return true;
}

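/* Example (illustrative sketch, with a hypothetical tree X): for
   VAL = 6 * x + 8 and DIV = 3 * x + 4 the function returns true and
   stores 2 to *MULT.  Element order is irrelevant, since elements are
   matched by value via aff_combination_find_elt.  */
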
/* Prints the affine VAL to the FILE.  */

static void
print_aff (FILE *file, aff_tree *val)
{
  unsigned i;
  bool uns = TYPE_UNSIGNED (val->type);
  if (POINTER_TYPE_P (val->type))
    uns = false;
  fprintf (file, "{\n  type = ");
  print_generic_expr (file, val->type, TDF_VOPS|TDF_MEMSYMS);
  fprintf (file, "\n  offset = ");
  dump_double_int (file, val->offset, uns);
  if (val->n > 0)
    {
      fprintf (file, "\n  elements = {\n");
      for (i = 0; i < val->n; i++)
	{
	  fprintf (file, "    [%d] = ", i);
	  print_generic_expr (file, val->elts[i].val, TDF_VOPS|TDF_MEMSYMS);

	  fprintf (file, " * ");
	  dump_double_int (file, val->elts[i].coef, uns);
	  if (i != val->n - 1)
	    fprintf (file, ", \n");
	}
      fprintf (file, "\n  }");
    }
  if (val->rest)
    {
      fprintf (file, "\n  rest = ");
      print_generic_expr (file, val->rest, TDF_VOPS|TDF_MEMSYMS);
    }
  fprintf (file, "\n}");
}

/* Prints the affine VAL to the standard error, used for debugging.  */

DEBUG_FUNCTION void
debug_aff (aff_tree *val)
{
  print_aff (stderr, val);
  fprintf (stderr, "\n");
}

/* Returns address of the reference REF in ADDR.  The size of the accessed
   location is stored to SIZE.  */

void
get_inner_reference_aff (tree ref, aff_tree *addr, double_int *size)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree toff;
  enum machine_mode mode;
  int uns, vol;
  aff_tree tmp;
  tree base = get_inner_reference (ref, &bitsize, &bitpos, &toff, &mode,
				   &uns, &vol, false);
  tree base_addr = build_fold_addr_expr (base);

  /* ADDR = &BASE + TOFF + BITPOS / BITS_PER_UNIT.  */

  tree_to_aff_combination (base_addr, sizetype, addr);

  if (toff)
    {
      tree_to_aff_combination (toff, sizetype, &tmp);
      aff_combination_add (addr, &tmp);
    }

  aff_combination_const (&tmp, sizetype,
			 double_int::from_shwi (bitpos / BITS_PER_UNIT));
  aff_combination_add (addr, &tmp);

  *size = double_int::from_shwi ((bitsize + BITS_PER_UNIT - 1) / BITS_PER_UNIT);
}

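/* Example (illustrative sketch, with a hypothetical reference A.F): if
   field F sits at byte offset 8, ADDR becomes &a + 8 and *SIZE is the
   size of F in bytes, rounded up from its size in bits.  */
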
/* Returns true if a region of size SIZE1 at position 0 and a region of
   size SIZE2 at position DIFF cannot overlap.  */

bool
aff_comb_cannot_overlap_p (aff_tree *diff, double_int size1, double_int size2)
{
  double_int d, bound;

  /* Unless the difference is a constant, we fail.  */
  if (diff->n != 0)
    return false;

  d = diff->offset;
  if (d.is_negative ())
    {
      /* The second object is before the first one, we succeed if the last
	 element of the second object is before the start of the first one.  */
      bound = d + size2 + double_int_minus_one;
      return bound.is_negative ();
    }
  else
    {
      /* We succeed if the second object starts after the first one ends.  */
      return size1.sle (d);
    }
}

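/* Example (illustrative sketch): with SIZE1 = 4, SIZE2 = 4 and DIFF
   having constant offset 4, the regions [0, 4) and [4, 8) are disjoint
   and the function returns true; an offset of 2 would make it return
   false, since the second region would start inside the first.  */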