/* Operations with affine combinations of trees.
   Copyright (C) 2005-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "tree-pretty-print.h"
#include "pointer-set.h"
#include "tree-affine.h"
#include "gimple.h"
#include "flags.h"
#include "dumpfile.h"
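
/* An affine combination (aff_tree) represents the value

     offset + elts[0].coef * elts[0].val + ... + elts[n-1].coef * elts[n-1].val
            + rest

   where at most MAX_AFF_ELTS elements are tracked individually and
   anything beyond that limit is folded into REST.  As an illustration,
   the expression a + 2 * b + 7 would be represented with offset = 7,
   n = 2, elts = { {a, 1}, {b, 2} } and rest = NULL_TREE.  */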
/* Extends CST as appropriate for the affine combination COMB.  */

double_int
double_int_ext_for_comb (double_int cst, aff_tree *comb)
{
  return cst.sext (TYPE_PRECISION (comb->type));
}
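
/* For instance, if COMB has an 8-bit type, the constant 0xff
   sign-extends to -1, so coefficient arithmetic behaves as if it were
   carried out in the precision of the combination.  */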
/* Initializes affine combination COMB so that its value is zero in TYPE.  */

static void
aff_combination_zero (aff_tree *comb, tree type)
{
  comb->type = type;
  comb->offset = double_int_zero;
  comb->n = 0;
  comb->rest = NULL_TREE;
}
/* Sets COMB to CST.  */

void
aff_combination_const (aff_tree *comb, tree type, double_int cst)
{
  aff_combination_zero (comb, type);

  comb->offset = double_int_ext_for_comb (cst, comb);
}
/* Sets COMB to single element ELT.  */

void
aff_combination_elt (aff_tree *comb, tree type, tree elt)
{
  aff_combination_zero (comb, type);

  comb->n = 1;
  comb->elts[0].val = elt;
  comb->elts[0].coef = double_int_one;
}
/* Scales COMB by SCALE.  */

void
aff_combination_scale (aff_tree *comb, double_int scale)
{
  unsigned i, j;

  scale = double_int_ext_for_comb (scale, comb);
  if (scale.is_one ())
    return;

  if (scale.is_zero ())
    {
      aff_combination_zero (comb, comb->type);
      return;
    }

  comb->offset
    = double_int_ext_for_comb (scale * comb->offset, comb);
  for (i = 0, j = 0; i < comb->n; i++)
    {
      double_int new_coef;

      new_coef
        = double_int_ext_for_comb (scale * comb->elts[i].coef, comb);
      /* A coefficient may become zero due to overflow.  Remove the zero
         elements.  */
      if (new_coef.is_zero ())
        continue;
      comb->elts[j].coef = new_coef;
      comb->elts[j].val = comb->elts[i].val;
      j++;
    }
  comb->n = j;

  if (comb->rest)
    {
      tree type = comb->type;
      if (POINTER_TYPE_P (type))
        type = sizetype;
      if (comb->n < MAX_AFF_ELTS)
        {
          comb->elts[comb->n].coef = scale;
          comb->elts[comb->n].val = comb->rest;
          comb->rest = NULL_TREE;
          comb->n++;
        }
      else
        comb->rest = fold_build2 (MULT_EXPR, type, comb->rest,
                                  double_int_to_tree (type, scale));
    }
}
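
/* As an illustration, scaling the combination 3 + 2*a by 4 yields
   12 + 8*a, while scaling by 0 resets the combination to zero.  A
   coefficient that wraps to zero in the combination's precision is
   dropped entirely, as the loop above shows.  */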
/* Adds ELT * SCALE to COMB.  */

void
aff_combination_add_elt (aff_tree *comb, tree elt, double_int scale)
{
  unsigned i;
  tree type;

  scale = double_int_ext_for_comb (scale, comb);
  if (scale.is_zero ())
    return;

  for (i = 0; i < comb->n; i++)
    if (operand_equal_p (comb->elts[i].val, elt, 0))
      {
        double_int new_coef;

        new_coef = comb->elts[i].coef + scale;
        new_coef = double_int_ext_for_comb (new_coef, comb);
        if (!new_coef.is_zero ())
          {
            comb->elts[i].coef = new_coef;
            return;
          }

        comb->n--;
        comb->elts[i] = comb->elts[comb->n];

        if (comb->rest)
          {
            gcc_assert (comb->n == MAX_AFF_ELTS - 1);
            comb->elts[comb->n].coef = double_int_one;
            comb->elts[comb->n].val = comb->rest;
            comb->rest = NULL_TREE;
            comb->n++;
          }
        return;
      }
  if (comb->n < MAX_AFF_ELTS)
    {
      comb->elts[comb->n].coef = scale;
      comb->elts[comb->n].val = elt;
      comb->n++;
      return;
    }

  type = comb->type;
  if (POINTER_TYPE_P (type))
    type = sizetype;

  if (scale.is_one ())
    elt = fold_convert (type, elt);
  else
    elt = fold_build2 (MULT_EXPR, type,
                       fold_convert (type, elt),
                       double_int_to_tree (type, scale));

  if (comb->rest)
    comb->rest = fold_build2 (PLUS_EXPR, type, comb->rest,
                              elt);
  else
    comb->rest = elt;
}
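
/* For example, adding the element a with scale -2 to the combination
   7 + 2*a cancels the existing term, leaving just 7.  Adding a new
   value when all MAX_AFF_ELTS slots are occupied instead folds the
   term into REST.  */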
/* Adds CST to C.  */

static void
aff_combination_add_cst (aff_tree *c, double_int cst)
{
  c->offset = double_int_ext_for_comb (c->offset + cst, c);
}
/* Adds COMB2 to COMB1.  */

void
aff_combination_add (aff_tree *comb1, aff_tree *comb2)
{
  unsigned i;

  aff_combination_add_cst (comb1, comb2->offset);
  for (i = 0; i < comb2->n; i++)
    aff_combination_add_elt (comb1, comb2->elts[i].val, comb2->elts[i].coef);
  if (comb2->rest)
    aff_combination_add_elt (comb1, comb2->rest, double_int_one);
}
/* Converts affine combination COMB to TYPE.  */

void
aff_combination_convert (aff_tree *comb, tree type)
{
  unsigned i, j;
  tree comb_type = comb->type;

  if (TYPE_PRECISION (type) > TYPE_PRECISION (comb_type))
    {
      tree val = fold_convert (type, aff_combination_to_tree (comb));
      tree_to_aff_combination (val, type, comb);
      return;
    }

  comb->type = type;
  if (comb->rest && !POINTER_TYPE_P (type))
    comb->rest = fold_convert (type, comb->rest);

  if (TYPE_PRECISION (type) == TYPE_PRECISION (comb_type))
    return;

  comb->offset = double_int_ext_for_comb (comb->offset, comb);
  for (i = j = 0; i < comb->n; i++)
    {
      double_int new_coef = double_int_ext_for_comb (comb->elts[i].coef, comb);
      if (new_coef.is_zero ())
        continue;
      comb->elts[j].coef = new_coef;
      comb->elts[j].val = fold_convert (type, comb->elts[i].val);
      j++;
    }

  comb->n = j;
  if (comb->n < MAX_AFF_ELTS && comb->rest)
    {
      comb->elts[comb->n].coef = double_int_one;
      comb->elts[comb->n].val = comb->rest;
      comb->rest = NULL_TREE;
      comb->n++;
    }
}
/* Splits EXPR into an affine combination of parts.  */

void
tree_to_aff_combination (tree expr, tree type, aff_tree *comb)
{
  aff_tree tmp;
  enum tree_code code;
  tree cst, core, toffset;
  HOST_WIDE_INT bitpos, bitsize;
  enum machine_mode mode;
  int unsignedp, volatilep;

  STRIP_NOPS (expr);

  code = TREE_CODE (expr);
  switch (code)
    {
    case INTEGER_CST:
      aff_combination_const (comb, type, tree_to_double_int (expr));
      return;

    case POINTER_PLUS_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
      aff_combination_add (comb, &tmp);
      return;

    case PLUS_EXPR:
    case MINUS_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), type, &tmp);
      if (code == MINUS_EXPR)
        aff_combination_scale (&tmp, double_int_minus_one);
      aff_combination_add (comb, &tmp);
      return;

    case MULT_EXPR:
      cst = TREE_OPERAND (expr, 1);
      if (TREE_CODE (cst) != INTEGER_CST)
        break;
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, tree_to_double_int (cst));
      return;

    case NEGATE_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, double_int_minus_one);
      return;

    case BIT_NOT_EXPR:
      /* ~x = -x - 1  */
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, double_int_minus_one);
      aff_combination_add_cst (comb, double_int_minus_one);
      return;

    case ADDR_EXPR:
      /* Handle &MEM[ptr + CST] which is equivalent to POINTER_PLUS_EXPR.  */
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == MEM_REF)
        {
          expr = TREE_OPERAND (expr, 0);
          tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
          tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
          aff_combination_add (comb, &tmp);
          return;
        }
      core = get_inner_reference (TREE_OPERAND (expr, 0), &bitsize, &bitpos,
                                  &toffset, &mode, &unsignedp, &volatilep,
                                  false);
      if (bitpos % BITS_PER_UNIT != 0)
        break;
      aff_combination_const (comb, type,
                             double_int::from_uhwi (bitpos / BITS_PER_UNIT));
      core = build_fold_addr_expr (core);
      if (TREE_CODE (core) == ADDR_EXPR)
        aff_combination_add_elt (comb, core, double_int_one);
      else
        {
          tree_to_aff_combination (core, type, &tmp);
          aff_combination_add (comb, &tmp);
        }
      if (toffset)
        {
          tree_to_aff_combination (toffset, type, &tmp);
          aff_combination_add (comb, &tmp);
        }
      return;

    case MEM_REF:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR)
        tree_to_aff_combination (TREE_OPERAND (TREE_OPERAND (expr, 0), 0),
                                 type, comb);
      else if (integer_zerop (TREE_OPERAND (expr, 1)))
        {
          aff_combination_elt (comb, type, expr);
          return;
        }
      else
        aff_combination_elt (comb, type,
                             build2 (MEM_REF, TREE_TYPE (expr),
                                     TREE_OPERAND (expr, 0),
                                     build_int_cst
                                       (TREE_TYPE (TREE_OPERAND (expr, 1)), 0)));
      tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
      aff_combination_add (comb, &tmp);
      return;

    default:
      break;
    }

  aff_combination_elt (comb, type, expr);
}
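
/* For instance, the expression (a + 2 * b) - 7 splits into the offset
   -7 and the elements { {a, 1}, {b, 2} }, and ~x splits into the
   offset -1 and the element {x, -1}, using the identity ~x = -x - 1.
   An expression that matches none of the cases above becomes a single
   element with coefficient 1.  */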
/* Creates EXPR + ELT * SCALE in TYPE.  EXPR is taken from affine
   combination COMB.  */

static tree
add_elt_to_tree (tree expr, tree type, tree elt, double_int scale,
                 aff_tree *comb)
{
  enum tree_code code;
  tree type1 = type;
  if (POINTER_TYPE_P (type))
    type1 = sizetype;

  scale = double_int_ext_for_comb (scale, comb);

  if (scale.is_minus_one ()
      && POINTER_TYPE_P (TREE_TYPE (elt)))
    {
      elt = fold_build1 (NEGATE_EXPR, sizetype, convert_to_ptrofftype (elt));
      scale = double_int_one;
    }

  if (scale.is_one ())
    {
      if (!expr)
        return elt;

      if (POINTER_TYPE_P (TREE_TYPE (expr)))
        return fold_build_pointer_plus (expr, convert_to_ptrofftype (elt));
      if (POINTER_TYPE_P (TREE_TYPE (elt)))
        return fold_build_pointer_plus (elt, convert_to_ptrofftype (expr));
      return fold_build2 (PLUS_EXPR, type1,
                          fold_convert (type1, expr),
                          fold_convert (type1, elt));
    }

  if (scale.is_minus_one ())
    {
      if (!expr)
        return fold_build1 (NEGATE_EXPR, TREE_TYPE (elt), elt);

      if (POINTER_TYPE_P (TREE_TYPE (expr)))
        return fold_build_pointer_plus
                (expr, convert_to_ptrofftype
                       (fold_build1 (NEGATE_EXPR, TREE_TYPE (elt), elt)));
      return fold_build2 (MINUS_EXPR, type1,
                          fold_convert (type1, expr),
                          fold_convert (type1, elt));
    }

  elt = fold_convert (type1, elt);
  if (!expr)
    return fold_build2 (MULT_EXPR, type1, elt,
                        double_int_to_tree (type1, scale));

  if (scale.is_negative ())
    {
      code = MINUS_EXPR;
      scale = -scale;
    }
  else
    code = PLUS_EXPR;

  elt = fold_build2 (MULT_EXPR, type1, elt,
                     double_int_to_tree (type1, scale));
  if (POINTER_TYPE_P (TREE_TYPE (expr)))
    {
      if (code == MINUS_EXPR)
        elt = fold_build1 (NEGATE_EXPR, type1, elt);
      return fold_build_pointer_plus (expr, elt);
    }
  return fold_build2 (code, type1,
                      fold_convert (type1, expr), elt);
}
/* Makes tree from the affine combination COMB.  */

tree
aff_combination_to_tree (aff_tree *comb)
{
  tree type = comb->type;
  tree expr = NULL_TREE;
  unsigned i;
  double_int off, sgn;
  tree type1 = type;
  if (POINTER_TYPE_P (type))
    type1 = sizetype;

  gcc_assert (comb->n == MAX_AFF_ELTS || comb->rest == NULL_TREE);

  for (i = 0; i < comb->n; i++)
    expr = add_elt_to_tree (expr, type, comb->elts[i].val, comb->elts[i].coef,
                            comb);

  if (comb->rest)
    expr = add_elt_to_tree (expr, type, comb->rest, double_int_one, comb);

  /* Ensure that we get x - 1, not x + (-1) or x + 0xff..f if x is
     unsigned.  */
  if (comb->offset.is_negative ())
    {
      off = -comb->offset;
      sgn = double_int_minus_one;
    }
  else
    {
      off = comb->offset;
      sgn = double_int_one;
    }
  return add_elt_to_tree (expr, type, double_int_to_tree (type1, off), sgn,
                          comb);
}
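
/* aff_combination_to_tree is the inverse of tree_to_aff_combination;
   a round trip through the affine representation can be used to
   canonicalize an expression, e.g. (sketch):

     aff_tree comb;

     tree_to_aff_combination (expr, TREE_TYPE (expr), &comb);
     expr = aff_combination_to_tree (&comb);  */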
/* Copies the tree elements of COMB to ensure that they are not shared.  */

void
unshare_aff_combination (aff_tree *comb)
{
  unsigned i;

  for (i = 0; i < comb->n; i++)
    comb->elts[i].val = unshare_expr (comb->elts[i].val);
  if (comb->rest)
    comb->rest = unshare_expr (comb->rest);
}
/* Remove M-th element from COMB.  */

void
aff_combination_remove_elt (aff_tree *comb, unsigned m)
{
  comb->n--;
  if (m <= comb->n)
    comb->elts[m] = comb->elts[comb->n];
  if (comb->rest)
    {
      comb->elts[comb->n].coef = double_int_one;
      comb->elts[comb->n].val = comb->rest;
      comb->rest = NULL_TREE;
      comb->n++;
    }
}
/* Adds C * COEF * VAL to R.  VAL may be NULL, in that case only
   C * COEF is added to R.  */

static void
aff_combination_add_product (aff_tree *c, double_int coef, tree val,
                             aff_tree *r)
{
  unsigned i;
  tree aval, type;

  for (i = 0; i < c->n; i++)
    {
      aval = c->elts[i].val;
      if (val)
        {
          type = TREE_TYPE (aval);
          aval = fold_build2 (MULT_EXPR, type, aval,
                              fold_convert (type, val));
        }

      aff_combination_add_elt (r, aval, coef * c->elts[i].coef);
    }

  if (c->rest)
    {
      aval = c->rest;
      if (val)
        {
          type = TREE_TYPE (aval);
          aval = fold_build2 (MULT_EXPR, type, aval,
                              fold_convert (type, val));
        }

      aff_combination_add_elt (r, aval, coef);
    }

  if (val)
    aff_combination_add_elt (r, val, coef * c->offset);
  else
    aff_combination_add_cst (r, coef * c->offset);
}
/* Multiplies C1 by C2, storing the result to R.  */

void
aff_combination_mult (aff_tree *c1, aff_tree *c2, aff_tree *r)
{
  unsigned i;
  gcc_assert (TYPE_PRECISION (c1->type) == TYPE_PRECISION (c2->type));

  aff_combination_zero (r, c1->type);

  for (i = 0; i < c2->n; i++)
    aff_combination_add_product (c1, c2->elts[i].coef, c2->elts[i].val, r);
  if (c2->rest)
    aff_combination_add_product (c1, double_int_one, c2->rest, r);
  aff_combination_add_product (c1, c2->offset, NULL, r);
}
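
/* E.g. multiplying (a + 1) by (b + 2) distributes into
   a*b + 2*a + b + 2: each element of C1 is multiplied by each element
   of C2 through aff_combination_add_product, products of two
   non-constant elements become new elements such as a*b, and the
   constant offsets multiply through.  */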
/* Returns the element of COMB whose value is VAL, or NULL if no such
   element exists.  If IDX is not NULL, it is set to the index of VAL in
   COMB.  */

static struct aff_comb_elt *
aff_combination_find_elt (aff_tree *comb, tree val, unsigned *idx)
{
  unsigned i;

  for (i = 0; i < comb->n; i++)
    if (operand_equal_p (comb->elts[i].val, val, 0))
      {
        if (idx)
          *idx = i;

        return &comb->elts[i];
      }

  return NULL;
}
/* Element of the cache that maps ssa name NAME to its expanded form
   as an affine expression EXPANSION.  */

struct name_expansion
{
  aff_tree expansion;

  /* True if the expansion for the name is just being generated.  */
  unsigned in_progress : 1;
};
/* Expands SSA names in COMB recursively.  CACHE is used to cache the
   results.  */

void
aff_combination_expand (aff_tree *comb ATTRIBUTE_UNUSED,
                        struct pointer_map_t **cache ATTRIBUTE_UNUSED)
{
  unsigned i;
  aff_tree to_add, current, curre;
  tree e, rhs;
  gimple def;
  double_int scale;
  void **slot;
  struct name_expansion *exp;

  aff_combination_zero (&to_add, comb->type);
  for (i = 0; i < comb->n; i++)
    {
      tree type, name;
      enum tree_code code;

      e = comb->elts[i].val;
      type = TREE_TYPE (e);
      name = e;
      /* Look through some conversions.  */
      if (TREE_CODE (e) == NOP_EXPR
          && (TYPE_PRECISION (type)
              >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (e, 0)))))
        name = TREE_OPERAND (e, 0);
      if (TREE_CODE (name) != SSA_NAME)
        continue;
      def = SSA_NAME_DEF_STMT (name);
      if (!is_gimple_assign (def) || gimple_assign_lhs (def) != name)
        continue;

      code = gimple_assign_rhs_code (def);
      if (code != SSA_NAME
          && !IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
          && (get_gimple_rhs_class (code) != GIMPLE_SINGLE_RHS
              || !is_gimple_min_invariant (gimple_assign_rhs1 (def))))
        continue;

      /* We do not know whether the reference retains its value at the
         place where the expansion is used.  */
      if (TREE_CODE_CLASS (code) == tcc_reference)
        continue;

      if (!*cache)
        *cache = pointer_map_create ();
      slot = pointer_map_insert (*cache, e);
      exp = (struct name_expansion *) *slot;

      if (!exp)
        {
          exp = XNEW (struct name_expansion);
          exp->in_progress = 1;
          *slot = exp;
          /* In principle this is a generally valid folding, but
             it is not unconditionally an optimization, so do it
             here and not in fold_unary.  */
          /* Convert (T1)(X *+- CST) into (T1)X *+- (T1)CST if T1 is wider
             than the type of X and overflow for the type of X is
             undefined.  */
          if (e != name
              && INTEGRAL_TYPE_P (type)
              && INTEGRAL_TYPE_P (TREE_TYPE (name))
              && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (name))
              && TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (name))
              && (code == PLUS_EXPR || code == MINUS_EXPR || code == MULT_EXPR)
              && TREE_CODE (gimple_assign_rhs2 (def)) == INTEGER_CST)
            rhs = fold_build2 (code, type,
                               fold_convert (type, gimple_assign_rhs1 (def)),
                               fold_convert (type, gimple_assign_rhs2 (def)));
          else
            {
              rhs = gimple_assign_rhs_to_tree (def);
              if (e != name)
                rhs = fold_convert (type, rhs);
            }
          tree_to_aff_combination_expand (rhs, comb->type, &current, cache);
          exp->expansion = current;
          exp->in_progress = 0;
        }
      else
        {
          /* Since we follow the definitions in the SSA form, we should not
             enter a cycle unless we pass through a phi node.  */
          gcc_assert (!exp->in_progress);
          current = exp->expansion;
        }

      /* Accumulate the new terms to TO_ADD, so that we do not modify
         COMB while traversing it; include the term -coef * E, to remove
         it from COMB.  */
      scale = comb->elts[i].coef;
      aff_combination_zero (&curre, comb->type);
      aff_combination_add_elt (&curre, e, -scale);
      aff_combination_scale (&current, scale);
      aff_combination_add (&to_add, &current);
      aff_combination_add (&to_add, &curre);
    }
  aff_combination_add (comb, &to_add);
}
/* Similar to tree_to_aff_combination, but follows SSA name definitions
   and expands them recursively.  CACHE is used to cache the expansions
   of the ssa names, to avoid exponential time complexity for cases
   like

   a1 = a0 + a0;
   a2 = a1 + a1;
   a3 = a2 + a2;
   ...  */

void
tree_to_aff_combination_expand (tree expr, tree type, aff_tree *comb,
                                struct pointer_map_t **cache)
{
  tree_to_aff_combination (expr, type, comb);
  aff_combination_expand (comb, cache);
}
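
/* A typical caller keeps a single cache alive across a series of
   expansions and releases it afterwards, e.g. (sketch):

     struct pointer_map_t *cache = NULL;
     aff_tree comb;

     tree_to_aff_combination_expand (expr, type, &comb, &cache);
     ...
     free_affine_expand_cache (&cache);  */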
/* Frees memory occupied by struct name_expansion in *VALUE.  Callback for
   pointer_map_traverse.  */

static bool
free_name_expansion (const void *key ATTRIBUTE_UNUSED, void **value,
                     void *data ATTRIBUTE_UNUSED)
{
  struct name_expansion *const exp = (struct name_expansion *) *value;

  free (exp);
  return true;
}
/* Frees memory allocated for the CACHE used by
   tree_to_aff_combination_expand.  */

void
free_affine_expand_cache (struct pointer_map_t **cache)
{
  if (!*cache)
    return;

  pointer_map_traverse (*cache, free_name_expansion, NULL);
  pointer_map_destroy (*cache);
  *cache = NULL;
}
/* If VAL != CST * DIV for any constant CST, returns false.
   Otherwise, if *MULT_SET is true, additionally compares CST and MULT,
   and if they are different, returns false.  Finally, if neither of these
   two cases occur, true is returned, and CST is stored to MULT and MULT_SET
   is set to true.  */

static bool
double_int_constant_multiple_p (double_int val, double_int div,
                                bool *mult_set, double_int *mult)
{
  double_int rem, cst;

  if (val.is_zero ())
    {
      if (*mult_set && !mult->is_zero ())
        return false;
      *mult_set = true;
      *mult = double_int_zero;
      return true;
    }

  if (div.is_zero ())
    return false;

  cst = val.sdivmod (div, FLOOR_DIV_EXPR, &rem);
  if (!rem.is_zero ())
    return false;

  if (*mult_set && *mult != cst)
    return false;

  *mult_set = true;
  *mult = cst;
  return true;
}
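
/* For example, VAL = 12 and DIV = 4 succeed with CST = 3, while
   VAL = 12 and DIV = 5 fail because the division leaves a remainder.
   VAL = 0 succeeds with CST = 0 for any DIV, unless a different
   nonzero multiple has already been recorded in *MULT.  */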
/* Returns true if VAL = X * DIV for some constant X.  If this is the case,
   X is stored to MULT.  */

bool
aff_combination_constant_multiple_p (aff_tree *val, aff_tree *div,
                                     double_int *mult)
{
  bool mult_set = false;
  unsigned i;

  if (val->n == 0 && val->offset.is_zero ())
    {
      *mult = double_int_zero;
      return true;
    }
  if (val->n != div->n)
    return false;

  if (val->rest || div->rest)
    return false;

  if (!double_int_constant_multiple_p (val->offset, div->offset,
                                       &mult_set, mult))
    return false;

  for (i = 0; i < div->n; i++)
    {
      struct aff_comb_elt *elt
              = aff_combination_find_elt (val, div->elts[i].val, NULL);
      if (!elt)
        return false;
      if (!double_int_constant_multiple_p (elt->coef, div->elts[i].coef,
                                           &mult_set, mult))
        return false;
    }

  gcc_assert (mult_set);
  return true;
}
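
/* E.g. VAL = 8 + 4*a is a constant multiple of DIV = 2 + a with
   MULT = 4, since the offsets and the coefficients of the matching
   elements are all in the same ratio.  */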
/* Prints the affine VAL to the FILE. */

static void
print_aff (FILE *file, aff_tree *val)
{
  unsigned i;
  bool uns = TYPE_UNSIGNED (val->type);
  if (POINTER_TYPE_P (val->type))
    uns = false;
  fprintf (file, "{\n  type = ");
  print_generic_expr (file, val->type, TDF_VOPS|TDF_MEMSYMS);
  fprintf (file, "\n  offset = ");
  dump_double_int (file, val->offset, uns);
  if (val->n > 0)
    {
      fprintf (file, "\n  elements = {\n");
      for (i = 0; i < val->n; i++)
        {
          fprintf (file, "    [%d] = ", i);
          print_generic_expr (file, val->elts[i].val, TDF_VOPS|TDF_MEMSYMS);

          fprintf (file, " * ");
          dump_double_int (file, val->elts[i].coef, uns);
          if (i != val->n - 1)
            fprintf (file, ", \n");
        }
      fprintf (file, "\n  }");
    }
  if (val->rest)
    {
      fprintf (file, "\n  rest = ");
      print_generic_expr (file, val->rest, TDF_VOPS|TDF_MEMSYMS);
    }
  fprintf (file, "\n}");
}

/* Prints the affine VAL to the standard error, used for debugging.  */

DEBUG_FUNCTION void
debug_aff (aff_tree *val)
{
  print_aff (stderr, val);
  fprintf (stderr, "\n");
}
/* Returns address of the reference REF in ADDR.  The size of the accessed
   location is stored to SIZE.  */

void
get_inner_reference_aff (tree ref, aff_tree *addr, double_int *size)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree toff;
  enum machine_mode mode;
  int uns, vol;
  aff_tree tmp;
  tree base = get_inner_reference (ref, &bitsize, &bitpos, &toff, &mode,
                                   &uns, &vol, false);
  tree base_addr = build_fold_addr_expr (base);

  /* ADDR = &BASE + TOFF + BITPOS / BITS_PER_UNIT.  */

  tree_to_aff_combination (base_addr, sizetype, addr);

  if (toff)
    {
      tree_to_aff_combination (toff, sizetype, &tmp);
      aff_combination_add (addr, &tmp);
    }

  aff_combination_const (&tmp, sizetype,
                         double_int::from_shwi (bitpos / BITS_PER_UNIT));
  aff_combination_add (addr, &tmp);

  *size = double_int::from_shwi ((bitsize + BITS_PER_UNIT - 1) / BITS_PER_UNIT);
}
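
/* As an illustration, for a field access such as a.f the resulting
   ADDR is &a plus the byte offset of the field f, and SIZE is the
   size of the access in bytes, rounded up to whole bytes.  */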
/* Returns true if a region of size SIZE1 at position 0 and a region of
   size SIZE2 at position DIFF cannot overlap.  */

bool
aff_comb_cannot_overlap_p (aff_tree *diff, double_int size1, double_int size2)
{
  double_int d, bound;

  /* Unless the difference is a constant, we fail.  */
  if (diff->n != 0)
    return false;

  d = diff->offset;
  if (d.is_negative ())
    {
      /* The second object is before the first one, we succeed if the last
         element of the second object is before the start of the first one.  */
      bound = d + size2 + double_int_minus_one;
      return bound.is_negative ();
    }
  else
    {
      /* We succeed if the second object starts after the first one ends.  */
      return size1.sle (d);
    }
}
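
/* For instance, with SIZE1 = 4 and SIZE2 = 4, a constant difference of
   4 means the second region starts exactly where the first one ends,
   so the regions cannot overlap; a difference of 2 would make them
   overlap, and the function returns false.  */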