/* Operations with affine combinations of trees.
   Copyright (C) 2005-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "expr.h"
#include "tree-pretty-print.h"
#include "pointer-set.h"
#include "tree-affine.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimplify.h"
#include "flags.h"
#include "dumpfile.h"
#include "cfgexpand.h"
#include "wide-int-print.h"
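
/* An affine combination COMB represents the value

     COMB->offset + COMB->elts[0].coef * COMB->elts[0].val + ...
                  + COMB->elts[COMB->n - 1].coef * COMB->elts[COMB->n - 1].val
                  + COMB->rest

   where COMB->rest, if non-NULL, is an opaque remainder added in with an
   implicit coefficient of 1.  As an illustrative sketch (names are
   hypothetical): the expression x + 4 * y + 7 is represented as
   offset = 7, n = 2, elts[0] = { x, 1 }, elts[1] = { y, 4 },
   rest = NULL_TREE.  */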

/* Extends CST as appropriate for the affine combination COMB.  */

widest_int
wide_int_ext_for_comb (const widest_int &cst, aff_tree *comb)
{
  return wi::sext (cst, TYPE_PRECISION (comb->type));
}

/* Initializes affine combination COMB so that its value is zero in TYPE.  */

static void
aff_combination_zero (aff_tree *comb, tree type)
{
  int i;

  comb->type = type;
  comb->offset = 0;
  comb->n = 0;
  for (i = 0; i < MAX_AFF_ELTS; i++)
    comb->elts[i].coef = 0;
  comb->rest = NULL_TREE;
}

/* Sets COMB to CST.  */

void
aff_combination_const (aff_tree *comb, tree type, const widest_int &cst)
{
  aff_combination_zero (comb, type);
  comb->offset = wide_int_ext_for_comb (cst, comb);
}

/* Sets COMB to single element ELT.  */

void
aff_combination_elt (aff_tree *comb, tree type, tree elt)
{
  aff_combination_zero (comb, type);

  comb->n = 1;
  comb->elts[0].val = elt;
  comb->elts[0].coef = 1;
}

/* Scales COMB by SCALE.  */

void
aff_combination_scale (aff_tree *comb, const widest_int &scale_in)
{
  unsigned i, j;

  widest_int scale = wide_int_ext_for_comb (scale_in, comb);
  if (scale == 1)
    return;

  if (scale == 0)
    {
      aff_combination_zero (comb, comb->type);
      return;
    }

  comb->offset = wide_int_ext_for_comb (scale * comb->offset, comb);
  for (i = 0, j = 0; i < comb->n; i++)
    {
      widest_int new_coef
        = wide_int_ext_for_comb (scale * comb->elts[i].coef, comb);
      /* A coefficient may become zero due to overflow.  Remove the zero
         elements.  */
      if (new_coef == 0)
        continue;
      comb->elts[j].coef = new_coef;
      comb->elts[j].val = comb->elts[i].val;
      j++;
    }
  comb->n = j;

  if (comb->rest)
    {
      tree type = comb->type;
      if (POINTER_TYPE_P (type))
        type = sizetype;
      if (comb->n < MAX_AFF_ELTS)
        {
          comb->elts[comb->n].coef = scale;
          comb->elts[comb->n].val = comb->rest;
          comb->rest = NULL_TREE;
          comb->n++;
        }
      else
        comb->rest = fold_build2 (MULT_EXPR, type, comb->rest,
                                  wide_int_to_tree (type, scale));
    }
}
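
/* Illustrative sketch: scaling { offset = 2, elts = { { x, 3 } } } by 5
   yields { offset = 10, elts = { { x, 15 } } }.  All products are reduced
   to the precision of COMB->type, so a coefficient that wraps around to
   zero causes its element to be dropped, as noted above.  */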

/* Adds ELT * SCALE to COMB.  */

void
aff_combination_add_elt (aff_tree *comb, tree elt, const widest_int &scale_in)
{
  unsigned i;
  tree type;

  widest_int scale = wide_int_ext_for_comb (scale_in, comb);
  if (scale == 0)
    return;

  for (i = 0; i < comb->n; i++)
    if (operand_equal_p (comb->elts[i].val, elt, 0))
      {
        widest_int new_coef
          = wide_int_ext_for_comb (comb->elts[i].coef + scale, comb);
        if (new_coef != 0)
          {
            comb->elts[i].coef = new_coef;
            return;
          }

        comb->n--;
        comb->elts[i] = comb->elts[comb->n];

        if (comb->rest)
          {
            gcc_assert (comb->n == MAX_AFF_ELTS - 1);
            comb->elts[comb->n].coef = 1;
            comb->elts[comb->n].val = comb->rest;
            comb->rest = NULL_TREE;
            comb->n++;
          }
        return;
      }
  if (comb->n < MAX_AFF_ELTS)
    {
      comb->elts[comb->n].coef = scale;
      comb->elts[comb->n].val = elt;
      comb->n++;
      return;
    }

  type = comb->type;
  if (POINTER_TYPE_P (type))
    type = sizetype;

  if (scale == 1)
    elt = fold_convert (type, elt);
  else
    elt = fold_build2 (MULT_EXPR, type,
                       fold_convert (type, elt),
                       wide_int_to_tree (type, scale));

  if (comb->rest)
    comb->rest = fold_build2 (PLUS_EXPR, type, comb->rest,
                              elt);
  else
    comb->rest = elt;
}
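
/* For example (sketch): adding x * 3 to a combination that already holds
   the element { x, -3 } cancels that element entirely, while adding a new
   element when all MAX_AFF_ELTS slots are taken folds ELT * SCALE into
   COMB->rest as an explicit tree instead.  */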

/* Adds CST to C.  */

static void
aff_combination_add_cst (aff_tree *c, const widest_int &cst)
{
  c->offset = wide_int_ext_for_comb (c->offset + cst, c);
}

/* Adds COMB2 to COMB1.  */

void
aff_combination_add (aff_tree *comb1, aff_tree *comb2)
{
  unsigned i;

  aff_combination_add_cst (comb1, comb2->offset);
  for (i = 0; i < comb2->n; i++)
    aff_combination_add_elt (comb1, comb2->elts[i].val, comb2->elts[i].coef);
  if (comb2->rest)
    aff_combination_add_elt (comb1, comb2->rest, 1);
}

/* Converts affine combination COMB to TYPE.  */

void
aff_combination_convert (aff_tree *comb, tree type)
{
  unsigned i, j;
  tree comb_type = comb->type;

  if (TYPE_PRECISION (type) > TYPE_PRECISION (comb_type))
    {
      tree val = fold_convert (type, aff_combination_to_tree (comb));
      tree_to_aff_combination (val, type, comb);
      return;
    }

  comb->type = type;
  if (comb->rest && !POINTER_TYPE_P (type))
    comb->rest = fold_convert (type, comb->rest);

  if (TYPE_PRECISION (type) == TYPE_PRECISION (comb_type))
    return;

  comb->offset = wide_int_ext_for_comb (comb->offset, comb);
  for (i = j = 0; i < comb->n; i++)
    {
      if (comb->elts[i].coef == 0)
        continue;
      comb->elts[j].coef = comb->elts[i].coef;
      comb->elts[j].val = fold_convert (type, comb->elts[i].val);
      j++;
    }

  comb->n = j;
  if (comb->n < MAX_AFF_ELTS && comb->rest)
    {
      comb->elts[comb->n].coef = 1;
      comb->elts[comb->n].val = comb->rest;
      comb->rest = NULL_TREE;
      comb->n++;
    }
}
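
/* Illustrative sketch of the narrowing path: converting a combination with
   offset = 0x100 from a 16-bit type to an 8-bit type re-extends the offset
   with wi::sext, leaving offset = 0.  Widening instead round-trips through
   a tree, since the elements cannot simply be reinterpreted at the larger
   precision.  */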

/* Splits EXPR into an affine combination of parts.  */

void
tree_to_aff_combination (tree expr, tree type, aff_tree *comb)
{
  aff_tree tmp;
  enum tree_code code;
  tree cst, core, toffset;
  HOST_WIDE_INT bitpos, bitsize;
  enum machine_mode mode;
  int unsignedp, volatilep;

  STRIP_NOPS (expr);

  code = TREE_CODE (expr);
  switch (code)
    {
    case INTEGER_CST:
      aff_combination_const (comb, type, wi::to_widest (expr));
      return;

    case POINTER_PLUS_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
      aff_combination_add (comb, &tmp);
      return;

    case PLUS_EXPR:
    case MINUS_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), type, &tmp);
      if (code == MINUS_EXPR)
        aff_combination_scale (&tmp, -1);
      aff_combination_add (comb, &tmp);
      return;

    case MULT_EXPR:
      cst = TREE_OPERAND (expr, 1);
      if (TREE_CODE (cst) != INTEGER_CST)
        break;
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, wi::to_widest (cst));
      return;

    case NEGATE_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, -1);
      return;

    case BIT_NOT_EXPR:
      /* ~x = -x - 1  */
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, -1);
      aff_combination_add_cst (comb, -1);
      return;

    case ADDR_EXPR:
      /* Handle &MEM[ptr + CST] which is equivalent to POINTER_PLUS_EXPR.  */
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == MEM_REF)
        {
          expr = TREE_OPERAND (expr, 0);
          tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
          tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
          aff_combination_add (comb, &tmp);
          return;
        }
      core = get_inner_reference (TREE_OPERAND (expr, 0), &bitsize, &bitpos,
                                  &toffset, &mode, &unsignedp, &volatilep,
                                  false);
      if (bitpos % BITS_PER_UNIT != 0)
        break;
      aff_combination_const (comb, type, bitpos / BITS_PER_UNIT);
      if (TREE_CODE (core) == MEM_REF)
        {
          aff_combination_add_cst (comb, wi::to_widest (TREE_OPERAND (core, 1)));
          core = TREE_OPERAND (core, 0);
        }
      else
        core = build_fold_addr_expr (core);

      if (TREE_CODE (core) == ADDR_EXPR)
        aff_combination_add_elt (comb, core, 1);
      else
        {
          tree_to_aff_combination (core, type, &tmp);
          aff_combination_add (comb, &tmp);
        }
      if (toffset)
        {
          tree_to_aff_combination (toffset, type, &tmp);
          aff_combination_add (comb, &tmp);
        }
      return;

    case MEM_REF:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR)
        tree_to_aff_combination (TREE_OPERAND (TREE_OPERAND (expr, 0), 0),
                                 type, comb);
      else if (integer_zerop (TREE_OPERAND (expr, 1)))
        {
          aff_combination_elt (comb, type, expr);
          return;
        }
      else
        aff_combination_elt (comb, type,
                             build2 (MEM_REF, TREE_TYPE (expr),
                                     TREE_OPERAND (expr, 0),
                                     build_int_cst
                                       (TREE_TYPE (TREE_OPERAND (expr, 1)), 0)));
      tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
      aff_combination_add (comb, &tmp);
      return;

    default:
      break;
    }

  aff_combination_elt (comb, type, expr);
}
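
/* As an illustration (hypothetical trees): splitting p + 4 * i + 15 in a
   pointer type produces offset = 15 with the elements { p, 1 } and
   { i, 4 }.  Any subexpression the switch above cannot decompose further,
   such as a call result, is kept whole as a single element with
   coefficient 1.  */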

/* Creates EXPR + ELT * SCALE in TYPE.  EXPR is taken from affine
   combination COMB.  */

static tree
add_elt_to_tree (tree expr, tree type, tree elt, const widest_int &scale_in,
                 aff_tree *comb ATTRIBUTE_UNUSED)
{
  enum tree_code code;
  tree type1 = type;
  if (POINTER_TYPE_P (type))
    type1 = sizetype;

  widest_int scale = wide_int_ext_for_comb (scale_in, comb);

  if (scale == -1
      && POINTER_TYPE_P (TREE_TYPE (elt)))
    {
      elt = convert_to_ptrofftype (elt);
      elt = fold_build1 (NEGATE_EXPR, TREE_TYPE (elt), elt);
      scale = 1;
    }

  if (scale == 1)
    {
      if (!expr)
        {
          if (POINTER_TYPE_P (TREE_TYPE (elt)))
            return elt;
          else
            return fold_convert (type1, elt);
        }

      if (POINTER_TYPE_P (TREE_TYPE (expr)))
        return fold_build_pointer_plus (expr, elt);
      if (POINTER_TYPE_P (TREE_TYPE (elt)))
        return fold_build_pointer_plus (elt, expr);
      return fold_build2 (PLUS_EXPR, type1,
                          expr, fold_convert (type1, elt));
    }

  if (scale == -1)
    {
      if (!expr)
        return fold_build1 (NEGATE_EXPR, type1,
                            fold_convert (type1, elt));

      if (POINTER_TYPE_P (TREE_TYPE (expr)))
        {
          elt = convert_to_ptrofftype (elt);
          elt = fold_build1 (NEGATE_EXPR, TREE_TYPE (elt), elt);
          return fold_build_pointer_plus (expr, elt);
        }
      return fold_build2 (MINUS_EXPR, type1,
                          expr, fold_convert (type1, elt));
    }

  elt = fold_convert (type1, elt);
  if (!expr)
    return fold_build2 (MULT_EXPR, type1, elt,
                        wide_int_to_tree (type1, scale));

  if (wi::neg_p (scale))
    {
      code = MINUS_EXPR;
      scale = -scale;
    }
  else
    code = PLUS_EXPR;

  elt = fold_build2 (MULT_EXPR, type1, elt,
                     wide_int_to_tree (type1, scale));
  if (POINTER_TYPE_P (TREE_TYPE (expr)))
    {
      if (code == MINUS_EXPR)
        elt = fold_build1 (NEGATE_EXPR, type1, elt);
      return fold_build_pointer_plus (expr, elt);
    }
  return fold_build2 (code, type1, expr, elt);
}
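
/* Note the pointer handling above: when ELT is a pointer and SCALE is -1,
   ELT is first converted to the pointer offset type and negated, so the
   result is always expressed through POINTER_PLUS_EXPR rather than as
   arithmetic directly on pointer types.  */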

/* Makes tree from the affine combination COMB.  */

tree
aff_combination_to_tree (aff_tree *comb)
{
  tree type = comb->type;
  tree expr = NULL_TREE;
  unsigned i;
  widest_int off, sgn;
  tree type1 = type;
  if (POINTER_TYPE_P (type))
    type1 = sizetype;

  gcc_assert (comb->n == MAX_AFF_ELTS || comb->rest == NULL_TREE);

  for (i = 0; i < comb->n; i++)
    expr = add_elt_to_tree (expr, type, comb->elts[i].val, comb->elts[i].coef,
                            comb);

  if (comb->rest)
    expr = add_elt_to_tree (expr, type, comb->rest, 1, comb);

  /* Ensure that we get x - 1, not x + (-1) or x + 0xff..f if x is
     unsigned.  */
  if (wi::neg_p (comb->offset))
    {
      off = -comb->offset;
      sgn = -1;
    }
  else
    {
      off = comb->offset;
      sgn = 1;
    }
  return add_elt_to_tree (expr, type, wide_int_to_tree (type1, off), sgn,
                          comb);
}

/* Copies the tree elements of COMB to ensure that they are not shared.  */

void
unshare_aff_combination (aff_tree *comb)
{
  unsigned i;

  for (i = 0; i < comb->n; i++)
    comb->elts[i].val = unshare_expr (comb->elts[i].val);
  if (comb->rest)
    comb->rest = unshare_expr (comb->rest);
}

/* Removes the M-th element from COMB.  */

void
aff_combination_remove_elt (aff_tree *comb, unsigned m)
{
  comb->n--;
  if (m <= comb->n)
    comb->elts[m] = comb->elts[comb->n];
  if (comb->rest)
    {
      comb->elts[comb->n].coef = 1;
      comb->elts[comb->n].val = comb->rest;
      comb->rest = NULL_TREE;
      comb->n++;
    }
}

/* Adds C * COEF * VAL to R.  VAL may be NULL, in which case only
   C * COEF is added to R.  */

static void
aff_combination_add_product (aff_tree *c, const widest_int &coef, tree val,
                             aff_tree *r)
{
  unsigned i;
  tree aval, type;

  for (i = 0; i < c->n; i++)
    {
      aval = c->elts[i].val;
      if (val)
        {
          type = TREE_TYPE (aval);
          aval = fold_build2 (MULT_EXPR, type, aval,
                              fold_convert (type, val));
        }

      aff_combination_add_elt (r, aval, coef * c->elts[i].coef);
    }

  if (c->rest)
    {
      aval = c->rest;
      if (val)
        {
          type = TREE_TYPE (aval);
          aval = fold_build2 (MULT_EXPR, type, aval,
                              fold_convert (type, val));
        }

      aff_combination_add_elt (r, aval, coef);
    }

  if (val)
    aff_combination_add_elt (r, val, coef * c->offset);
  else
    aff_combination_add_cst (r, coef * c->offset);
}

/* Multiplies C1 by C2, storing the result to R.  */

void
aff_combination_mult (aff_tree *c1, aff_tree *c2, aff_tree *r)
{
  unsigned i;
  gcc_assert (TYPE_PRECISION (c1->type) == TYPE_PRECISION (c2->type));

  aff_combination_zero (r, c1->type);

  for (i = 0; i < c2->n; i++)
    aff_combination_add_product (c1, c2->elts[i].coef, c2->elts[i].val, r);
  if (c2->rest)
    aff_combination_add_product (c1, 1, c2->rest, r);
  aff_combination_add_product (c1, c2->offset, NULL, r);
}
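
/* Multiplication distributes over all terms.  Illustrative sketch:
   (2 + x) * (3 + y) expands to 6 + 3 * x + 2 * y + x * y, where the
   x * y element is built as an explicit MULT_EXPR tree by
   aff_combination_add_product.  */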

/* Returns the element of COMB whose value is VAL, or NULL if no such
   element exists.  If IDX is not NULL, it is set to the index of VAL in
   COMB.  */

static struct aff_comb_elt *
aff_combination_find_elt (aff_tree *comb, tree val, unsigned *idx)
{
  unsigned i;

  for (i = 0; i < comb->n; i++)
    if (operand_equal_p (comb->elts[i].val, val, 0))
      {
        if (idx)
          *idx = i;

        return &comb->elts[i];
      }

  return NULL;
}

/* Element of the cache that maps ssa name NAME to its expanded form
   as an affine expression EXPANSION.  */

struct name_expansion
{
  aff_tree expansion;

  /* True if the expansion for the name is just being generated.  */
  unsigned in_progress : 1;
};

/* Expands SSA names in COMB recursively.  CACHE is used to cache the
   results.  */

void
aff_combination_expand (aff_tree *comb ATTRIBUTE_UNUSED,
                        struct pointer_map_t **cache ATTRIBUTE_UNUSED)
{
  unsigned i;
  aff_tree to_add, current, curre;
  tree e, rhs;
  gimple def;
  widest_int scale;
  void **slot;
  struct name_expansion *exp;

  aff_combination_zero (&to_add, comb->type);
  for (i = 0; i < comb->n; i++)
    {
      tree type, name;
      enum tree_code code;

      e = comb->elts[i].val;
      type = TREE_TYPE (e);
      name = e;
      /* Look through some conversions.  */
      if (TREE_CODE (e) == NOP_EXPR
          && (TYPE_PRECISION (type)
              >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (e, 0)))))
        name = TREE_OPERAND (e, 0);
      if (TREE_CODE (name) != SSA_NAME)
        continue;
      def = SSA_NAME_DEF_STMT (name);
      if (!is_gimple_assign (def) || gimple_assign_lhs (def) != name)
        continue;

      code = gimple_assign_rhs_code (def);
      if (code != SSA_NAME
          && !IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
          && (get_gimple_rhs_class (code) != GIMPLE_SINGLE_RHS
              || !is_gimple_min_invariant (gimple_assign_rhs1 (def))))
        continue;

      /* We do not know whether the reference retains its value at the
         place where the expansion is used.  */
      if (TREE_CODE_CLASS (code) == tcc_reference)
        continue;

      if (!*cache)
        *cache = pointer_map_create ();
      slot = pointer_map_insert (*cache, e);
      exp = (struct name_expansion *) *slot;

      if (!exp)
        {
          exp = XNEW (struct name_expansion);
          exp->in_progress = 1;
          *slot = exp;
          /* In principle this is a generally valid folding, but
             it is not unconditionally an optimization, so do it
             here and not in fold_unary.  */
          /* Convert (T1)(X *+- CST) into (T1)X *+- (T1)CST if T1 is wider
             than the type of X and overflow for the type of X is
             undefined.  */
          if (e != name
              && INTEGRAL_TYPE_P (type)
              && INTEGRAL_TYPE_P (TREE_TYPE (name))
              && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (name))
              && TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (name))
              && (code == PLUS_EXPR || code == MINUS_EXPR || code == MULT_EXPR)
              && TREE_CODE (gimple_assign_rhs2 (def)) == INTEGER_CST)
            rhs = fold_build2 (code, type,
                               fold_convert (type, gimple_assign_rhs1 (def)),
                               fold_convert (type, gimple_assign_rhs2 (def)));
          else
            {
              rhs = gimple_assign_rhs_to_tree (def);
              if (e != name)
                rhs = fold_convert (type, rhs);
            }
          tree_to_aff_combination_expand (rhs, comb->type, &current, cache);
          exp->expansion = current;
          exp->in_progress = 0;
        }
      else
        {
          /* Since we follow the definitions in the SSA form, we should not
             enter a cycle unless we pass through a phi node.  */
          gcc_assert (!exp->in_progress);
          current = exp->expansion;
        }

      /* Accumulate the new terms to TO_ADD, so that we do not modify
         COMB while traversing it; include the term -coef * E, to remove
         it from COMB.  */
      scale = comb->elts[i].coef;
      aff_combination_zero (&curre, comb->type);
      aff_combination_add_elt (&curre, e, -scale);
      aff_combination_scale (&current, scale);
      aff_combination_add (&to_add, &current);
      aff_combination_add (&to_add, &curre);
    }
  aff_combination_add (comb, &to_add);
}
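
/* For instance (sketch): if COMB contains a_1 with coefficient 2 and a_1
   is defined by a_1 = b_1 + 3, the loop above accumulates the expansion
   scaled by 2 (that is, 2 * b_1 + 6) together with the cancelling term
   -2 * a_1, so the final aff_combination_add effectively replaces
   2 * a_1 in COMB.  */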

/* Similar to tree_to_aff_combination, but follows SSA name definitions
   and expands them recursively.  CACHE is used to cache the expansions
   of the ssa names, to avoid exponential time complexity for cases
   like

   a1 = a0 + a0;
   a2 = a1 + a1;
   a3 = a2 + a2;
   ...  */

void
tree_to_aff_combination_expand (tree expr, tree type, aff_tree *comb,
                                struct pointer_map_t **cache)
{
  tree_to_aff_combination (expr, type, comb);
  aff_combination_expand (comb, cache);
}

/* Frees memory occupied by struct name_expansion in *VALUE.  Callback for
   pointer_map_traverse.  */

static bool
free_name_expansion (const void *key ATTRIBUTE_UNUSED, void **value,
                     void *data ATTRIBUTE_UNUSED)
{
  struct name_expansion *const exp = (struct name_expansion *) *value;

  free (exp);
  return true;
}

/* Frees memory allocated for the CACHE used by
   tree_to_aff_combination_expand.  */

void
free_affine_expand_cache (struct pointer_map_t **cache)
{
  if (!*cache)
    return;

  pointer_map_traverse (*cache, free_name_expansion, NULL);
  pointer_map_destroy (*cache);
  *cache = NULL;
}

/* If VAL != CST * DIV for any constant CST, returns false.
   Otherwise, if *MULT_SET is true, additionally compares CST and MULT,
   and if they are different, returns false.  Finally, if neither of these
   two cases occur, true is returned, and CST is stored to MULT and MULT_SET
   is set to true.  */

static bool
wide_int_constant_multiple_p (const widest_int &val, const widest_int &div,
                              bool *mult_set, widest_int *mult)
{
  widest_int cst;

  if (val == 0)
    {
      if (*mult_set && *mult != 0)
        return false;
      *mult_set = true;
      *mult = 0;
      return true;
    }

  if (div == 0)
    return false;

  if (!wi::multiple_of_p (val, div, SIGNED, &cst))
    return false;

  if (*mult_set && *mult != cst)
    return false;

  *mult_set = true;
  *mult = cst;
  return true;
}

/* Returns true if VAL = X * DIV for some constant X.  If this is the case,
   X is stored to MULT.  */

bool
aff_combination_constant_multiple_p (aff_tree *val, aff_tree *div,
                                     widest_int *mult)
{
  bool mult_set = false;
  unsigned i;

  if (val->n == 0 && val->offset == 0)
    {
      *mult = 0;
      return true;
    }
  if (val->n != div->n)
    return false;

  if (val->rest || div->rest)
    return false;

  if (!wide_int_constant_multiple_p (val->offset, div->offset,
                                     &mult_set, mult))
    return false;

  for (i = 0; i < div->n; i++)
    {
      struct aff_comb_elt *elt
        = aff_combination_find_elt (val, div->elts[i].val, NULL);
      if (!elt)
        return false;
      if (!wide_int_constant_multiple_p (elt->coef, div->elts[i].coef,
                                         &mult_set, mult))
        return false;
    }

  gcc_assert (mult_set);
  return true;
}
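
/* Worked example (sketch): with VAL = 4 + 8 * x and DIV = 1 + 2 * x, the
   offsets give CST = 4 and the x coefficients agree (8 = 4 * 2), so the
   function returns true and stores 4 to MULT.  */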

/* Prints the affine VAL to the FILE.  */

static void
print_aff (FILE *file, aff_tree *val)
{
  unsigned i;
  signop sgn = TYPE_SIGN (val->type);
  if (POINTER_TYPE_P (val->type))
    sgn = SIGNED;
  fprintf (file, "{\n  type = ");
  print_generic_expr (file, val->type, TDF_VOPS|TDF_MEMSYMS);
  fprintf (file, "\n  offset = ");
  print_dec (val->offset, file, sgn);
  if (val->n > 0)
    {
      fprintf (file, "\n  elements = {\n");
      for (i = 0; i < val->n; i++)
        {
          fprintf (file, "    [%d] = ", i);
          print_generic_expr (file, val->elts[i].val, TDF_VOPS|TDF_MEMSYMS);

          fprintf (file, " * ");
          print_dec (val->elts[i].coef, file, sgn);
          if (i != val->n - 1)
            fprintf (file, ", \n");
        }
      fprintf (file, "\n  }");
    }
  if (val->rest)
    {
      fprintf (file, "\n  rest = ");
      print_generic_expr (file, val->rest, TDF_VOPS|TDF_MEMSYMS);
    }
  fprintf (file, "\n}");
}

/* Prints the affine VAL to the standard error, used for debugging.  */

DEBUG_FUNCTION void
debug_aff (aff_tree *val)
{
  print_aff (stderr, val);
  fprintf (stderr, "\n");
}

/* Computes address of the reference REF in ADDR.  The size of the accessed
   location is stored to SIZE.  Returns the ultimate containing object to
   which REF refers.  */

tree
get_inner_reference_aff (tree ref, aff_tree *addr, widest_int *size)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree toff;
  enum machine_mode mode;
  int uns, vol;
  aff_tree tmp;
  tree base = get_inner_reference (ref, &bitsize, &bitpos, &toff, &mode,
                                   &uns, &vol, false);
  tree base_addr = build_fold_addr_expr (base);

  /* ADDR = &BASE + TOFF + BITPOS / BITS_PER_UNIT.  */

  tree_to_aff_combination (base_addr, sizetype, addr);

  if (toff)
    {
      tree_to_aff_combination (toff, sizetype, &tmp);
      aff_combination_add (addr, &tmp);
    }

  aff_combination_const (&tmp, sizetype, bitpos / BITS_PER_UNIT);
  aff_combination_add (addr, &tmp);

  *size = (bitsize + BITS_PER_UNIT - 1) / BITS_PER_UNIT;

  return base;
}
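
/* Sketch (hypothetical layout): for a reference a.b where field b sits at
   byte offset 8, ADDR becomes &a + 8, i.e. offset = 8 with the single
   element { &a, 1 }, and SIZE is the access size rounded up to whole
   bytes.  */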

/* Returns true if a region of size SIZE1 at position 0 and a region of
   size SIZE2 at position DIFF cannot overlap.  */

bool
aff_comb_cannot_overlap_p (aff_tree *diff, const widest_int &size1,
                           const widest_int &size2)
{
  /* Unless the difference is a constant, we fail.  */
  if (diff->n != 0)
    return false;

  if (wi::neg_p (diff->offset))
    {
      /* The second object is before the first one, we succeed if the last
         element of the second object is before the start of the first one.  */
      return wi::neg_p (diff->offset + size2 - 1);
    }
  else
    {
      /* We succeed if the second object starts after the first one ends.  */
      return wi::les_p (size1, diff->offset);
    }
}
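
/* Worked example (sketch): with DIFF->offset = 8, SIZE1 = 8 and SIZE2 = 4,
   the second region starts exactly where the first one ends (8 <= 8), so
   the regions cannot overlap.  With DIFF->offset = -4 and SIZE2 = 4, the
   second region ends at byte -1, still before position 0, so again no
   overlap.  */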