/* Operations with affine combinations of trees.
   Copyright (C) 2005-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "expr.h"
#include "tree-pretty-print.h"
#include "tree-affine.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimplify.h"
#include "flags.h"
#include "dumpfile.h"
#include "cfgexpand.h"
#include "wide-int-print.h"
/* Extends CST as appropriate for the affine combination COMB.  */

widest_int
wide_int_ext_for_comb (const widest_int &cst, aff_tree *comb)
{
  return wi::sext (cst, TYPE_PRECISION (comb->type));
}
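/* Editorial sketch (not part of the original sources): the sign extension
   keeps every constant reduced to the precision of the combination's type.
   For example, with an 8-bit COMB->type a CST of 255 comes back as -1, so
   255 and -1 denote the same coefficient or offset in COMB.  */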
/* Initializes affine combination COMB so that its value is zero in TYPE.  */

static void
aff_combination_zero (aff_tree *comb, tree type)
{
  int i;
  comb->type = type;
  comb->offset = 0;
  comb->n = 0;
  for (i = 0; i < MAX_AFF_ELTS; i++)
    comb->elts[i].coef = 0;
  comb->rest = NULL_TREE;
}
/* Sets COMB to CST.  */

void
aff_combination_const (aff_tree *comb, tree type, const widest_int &cst)
{
  aff_combination_zero (comb, type);
  comb->offset = wide_int_ext_for_comb (cst, comb);
}
/* Sets COMB to single element ELT.  */

void
aff_combination_elt (aff_tree *comb, tree type, tree elt)
{
  aff_combination_zero (comb, type);

  comb->n = 1;
  comb->elts[0].val = elt;
  comb->elts[0].coef = 1;
}
/* Scales COMB by SCALE.  */

void
aff_combination_scale (aff_tree *comb, const widest_int &scale_in)
{
  unsigned i, j;

  widest_int scale = wide_int_ext_for_comb (scale_in, comb);
  if (scale == 1)
    return;

  if (scale == 0)
    {
      aff_combination_zero (comb, comb->type);
      return;
    }

  comb->offset = wide_int_ext_for_comb (scale * comb->offset, comb);
  for (i = 0, j = 0; i < comb->n; i++)
    {
      widest_int new_coef
        = wide_int_ext_for_comb (scale * comb->elts[i].coef, comb);
      /* A coefficient may become zero due to overflow.  Remove the zero
         elements.  */
      if (new_coef == 0)
        continue;
      comb->elts[j].coef = new_coef;
      comb->elts[j].val = comb->elts[i].val;
      j++;
    }
  comb->n = j;

  if (comb->rest)
    {
      tree type = comb->type;
      if (POINTER_TYPE_P (type))
        type = sizetype;
      if (comb->n < MAX_AFF_ELTS)
        {
          comb->elts[comb->n].coef = scale;
          comb->elts[comb->n].val = comb->rest;
          comb->rest = NULL_TREE;
          comb->n++;
        }
      else
        comb->rest = fold_build2 (MULT_EXPR, type, comb->rest,
                                  wide_int_to_tree (type, scale));
    }
}
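/* Usage sketch (editorial; assumes the usual GCC-internal context):

     aff_tree a;
     tree_to_aff_combination (expr, type, &a);
     aff_combination_scale (&a, 4);

   turns { offset = 3; elts = 2 * x } into { offset = 12; elts = 8 * x }.
   Since coefficients are reduced to TYPE_PRECISION (a.type) bits, scaling
   can make a coefficient zero, in which case the element is dropped.  */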
/* Adds ELT * SCALE to COMB.  */

void
aff_combination_add_elt (aff_tree *comb, tree elt, const widest_int &scale_in)
{
  unsigned i;
  tree type;

  widest_int scale = wide_int_ext_for_comb (scale_in, comb);
  if (scale == 0)
    return;

  for (i = 0; i < comb->n; i++)
    if (operand_equal_p (comb->elts[i].val, elt, 0))
      {
        widest_int new_coef
          = wide_int_ext_for_comb (comb->elts[i].coef + scale, comb);
        if (new_coef != 0)
          {
            comb->elts[i].coef = new_coef;
            return;
          }

        comb->n--;
        comb->elts[i] = comb->elts[comb->n];

        if (comb->rest)
          {
            gcc_assert (comb->n == MAX_AFF_ELTS - 1);
            comb->elts[comb->n].coef = 1;
            comb->elts[comb->n].val = comb->rest;
            comb->rest = NULL_TREE;
            comb->n++;
          }
        return;
      }
  if (comb->n < MAX_AFF_ELTS)
    {
      comb->elts[comb->n].coef = scale;
      comb->elts[comb->n].val = elt;
      comb->n++;
      return;
    }

  type = comb->type;
  if (POINTER_TYPE_P (type))
    type = sizetype;

  if (scale == 1)
    elt = fold_convert (type, elt);
  else
    elt = fold_build2 (MULT_EXPR, type,
                       fold_convert (type, elt),
                       wide_int_to_tree (type, scale));

  if (comb->rest)
    comb->rest = fold_build2 (PLUS_EXPR, type, comb->rest,
                              elt);
  else
    comb->rest = elt;
}
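/* Editorial sketch: adding x with SCALE 2 to a combination that already
   contains 3 * x merges the coefficients into 5 * x; if they cancel, the
   element is removed (and a pending COMB->rest is promoted into the freed
   slot).  Once all MAX_AFF_ELTS slots are taken, further elements are
   folded into the catch-all tree COMB->rest instead.  */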
/* Adds CST to C.  */

static void
aff_combination_add_cst (aff_tree *c, const widest_int &cst)
{
  c->offset = wide_int_ext_for_comb (c->offset + cst, c);
}
/* Adds COMB2 to COMB1.  */

void
aff_combination_add (aff_tree *comb1, aff_tree *comb2)
{
  unsigned i;

  aff_combination_add_cst (comb1, comb2->offset);
  for (i = 0; i < comb2->n; i++)
    aff_combination_add_elt (comb1, comb2->elts[i].val, comb2->elts[i].coef);
  if (comb2->rest)
    aff_combination_add_elt (comb1, comb2->rest, 1);
}
/* Converts affine combination COMB to TYPE.  */

void
aff_combination_convert (aff_tree *comb, tree type)
{
  unsigned i, j;
  tree comb_type = comb->type;

  if (TYPE_PRECISION (type) > TYPE_PRECISION (comb_type))
    {
      tree val = fold_convert (type, aff_combination_to_tree (comb));
      tree_to_aff_combination (val, type, comb);
      return;
    }

  comb->type = type;
  if (comb->rest && !POINTER_TYPE_P (type))
    comb->rest = fold_convert (type, comb->rest);

  if (TYPE_PRECISION (type) == TYPE_PRECISION (comb_type))
    return;

  comb->offset = wide_int_ext_for_comb (comb->offset, comb);
  for (i = j = 0; i < comb->n; i++)
    {
      if (comb->elts[i].coef == 0)
        continue;
      comb->elts[j].coef = comb->elts[i].coef;
      comb->elts[j].val = fold_convert (type, comb->elts[i].val);
      j++;
    }

  comb->n = j;
  if (comb->n < MAX_AFF_ELTS && comb->rest)
    {
      comb->elts[comb->n].coef = 1;
      comb->elts[comb->n].val = comb->rest;
      comb->rest = NULL_TREE;
      comb->n++;
    }
}
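/* Editorial sketch: narrowing only re-extends the offset and coefficients
   to the new precision (dropping elements whose coefficient became zero),
   whereas widening rebuilds a tree and re-parses it, since the wrap-around
   value of e.g. a 16-bit 5 * x does not determine the wider value.  */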
/* Splits EXPR into an affine combination of parts.  */

void
tree_to_aff_combination (tree expr, tree type, aff_tree *comb)
{
  aff_tree tmp;
  enum tree_code code;
  tree cst, core, toffset;
  HOST_WIDE_INT bitpos, bitsize;
  enum machine_mode mode;
  int unsignedp, volatilep;

  STRIP_NOPS (expr);

  code = TREE_CODE (expr);
  switch (code)
    {
    case INTEGER_CST:
      aff_combination_const (comb, type, wi::to_widest (expr));
      return;

    case POINTER_PLUS_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
      aff_combination_add (comb, &tmp);
      return;

    case PLUS_EXPR:
    case MINUS_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), type, &tmp);
      if (code == MINUS_EXPR)
        aff_combination_scale (&tmp, -1);
      aff_combination_add (comb, &tmp);
      return;

    case MULT_EXPR:
      cst = TREE_OPERAND (expr, 1);
      if (TREE_CODE (cst) != INTEGER_CST)
        break;
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, wi::to_widest (cst));
      return;

    case NEGATE_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, -1);
      return;

    case BIT_NOT_EXPR:
      /* ~x = -x - 1 */
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, -1);
      aff_combination_add_cst (comb, -1);
      return;

    case ADDR_EXPR:
      /* Handle &MEM[ptr + CST] which is equivalent to POINTER_PLUS_EXPR.  */
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == MEM_REF)
        {
          expr = TREE_OPERAND (expr, 0);
          tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
          tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
          aff_combination_add (comb, &tmp);
          return;
        }
      core = get_inner_reference (TREE_OPERAND (expr, 0), &bitsize, &bitpos,
                                  &toffset, &mode, &unsignedp, &volatilep,
                                  false);
      if (bitpos % BITS_PER_UNIT != 0)
        break;
      aff_combination_const (comb, type, bitpos / BITS_PER_UNIT);
      if (TREE_CODE (core) == MEM_REF)
        {
          aff_combination_add_cst (comb, wi::to_widest (TREE_OPERAND (core, 1)));
          core = TREE_OPERAND (core, 0);
        }
      else
        core = build_fold_addr_expr (core);

      if (TREE_CODE (core) == ADDR_EXPR)
        aff_combination_add_elt (comb, core, 1);
      else
        {
          tree_to_aff_combination (core, type, &tmp);
          aff_combination_add (comb, &tmp);
        }
      if (toffset)
        {
          tree_to_aff_combination (toffset, type, &tmp);
          aff_combination_add (comb, &tmp);
        }
      return;

    case MEM_REF:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR)
        tree_to_aff_combination (TREE_OPERAND (TREE_OPERAND (expr, 0), 0),
                                 type, comb);
      else if (integer_zerop (TREE_OPERAND (expr, 1)))
        {
          aff_combination_elt (comb, type, expr);
          return;
        }
      else
        aff_combination_elt (comb, type,
                             build2 (MEM_REF, TREE_TYPE (expr),
                                     TREE_OPERAND (expr, 0),
                                     build_int_cst
                                       (TREE_TYPE (TREE_OPERAND (expr, 1)), 0)));
      tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
      aff_combination_add (comb, &tmp);
      return;

    default:
      break;
    }

  aff_combination_elt (comb, type, expr);
}
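/* Worked example (editorial): for EXPR = a + 4 * b - 8 of type int, the
   resulting combination is roughly

     { type = int; offset = -8; elts = { a * 1, b * 4 } }

   i.e. the PLUS/MINUS/MULT cases above recurse into the operands while the
   INTEGER_CST case supplies the constant offset.  Anything the switch does
   not understand becomes a single opaque element with coefficient 1.  */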
/* Creates EXPR + ELT * SCALE in TYPE.  EXPR is taken from affine
   combination COMB.  */

static tree
add_elt_to_tree (tree expr, tree type, tree elt, const widest_int &scale_in,
                 aff_tree *comb ATTRIBUTE_UNUSED)
{
  enum tree_code code;
  tree type1 = type;
  if (POINTER_TYPE_P (type))
    type1 = sizetype;

  widest_int scale = wide_int_ext_for_comb (scale_in, comb);

  if (scale == -1
      && POINTER_TYPE_P (TREE_TYPE (elt)))
    {
      elt = convert_to_ptrofftype (elt);
      elt = fold_build1 (NEGATE_EXPR, TREE_TYPE (elt), elt);
      scale = 1;
    }

  if (scale == 1)
    {
      if (!expr)
        {
          if (POINTER_TYPE_P (TREE_TYPE (elt)))
            return elt;
          else
            return fold_convert (type1, elt);
        }

      if (POINTER_TYPE_P (TREE_TYPE (expr)))
        return fold_build_pointer_plus (expr, elt);
      if (POINTER_TYPE_P (TREE_TYPE (elt)))
        return fold_build_pointer_plus (elt, expr);
      return fold_build2 (PLUS_EXPR, type1,
                          expr, fold_convert (type1, elt));
    }

  if (scale == -1)
    {
      if (!expr)
        return fold_build1 (NEGATE_EXPR, type1,
                            fold_convert (type1, elt));

      if (POINTER_TYPE_P (TREE_TYPE (expr)))
        {
          elt = convert_to_ptrofftype (elt);
          elt = fold_build1 (NEGATE_EXPR, TREE_TYPE (elt), elt);
          return fold_build_pointer_plus (expr, elt);
        }
      return fold_build2 (MINUS_EXPR, type1,
                          expr, fold_convert (type1, elt));
    }

  elt = fold_convert (type1, elt);
  if (!expr)
    return fold_build2 (MULT_EXPR, type1, elt,
                        wide_int_to_tree (type1, scale));

  if (wi::neg_p (scale))
    {
      code = MINUS_EXPR;
      scale = -scale;
    }
  else
    code = PLUS_EXPR;

  elt = fold_build2 (MULT_EXPR, type1, elt,
                     wide_int_to_tree (type1, scale));
  if (POINTER_TYPE_P (TREE_TYPE (expr)))
    {
      if (code == MINUS_EXPR)
        elt = fold_build1 (NEGATE_EXPR, type1, elt);
      return fold_build_pointer_plus (expr, elt);
    }
  return fold_build2 (code, type1, expr, elt);
}
/* Makes tree from the affine combination COMB.  */

tree
aff_combination_to_tree (aff_tree *comb)
{
  tree type = comb->type;
  tree expr = NULL_TREE;
  unsigned i;
  widest_int off, sgn;
  tree type1 = type;
  if (POINTER_TYPE_P (type))
    type1 = sizetype;

  gcc_assert (comb->n == MAX_AFF_ELTS || comb->rest == NULL_TREE);

  for (i = 0; i < comb->n; i++)
    expr = add_elt_to_tree (expr, type, comb->elts[i].val, comb->elts[i].coef,
                            comb);

  if (comb->rest)
    expr = add_elt_to_tree (expr, type, comb->rest, 1, comb);

  /* Ensure that we get x - 1, not x + (-1) or x + 0xff..f if x is
     unsigned.  */
  if (wi::neg_p (comb->offset))
    {
      off = -comb->offset;
      sgn = -1;
    }
  else
    {
      off = comb->offset;
      sgn = 1;
    }
  return add_elt_to_tree (expr, type, wide_int_to_tree (type1, off), sgn,
                          comb);
}
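/* Editorial note: this is the inverse of tree_to_aff_combination.  The sign
   handling of the offset above means that a combination { offset = -1;
   elts = x * 1 } of an unsigned type is rebuilt as the tree x - 1 rather
   than x + 0xff..f.  */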
/* Copies the tree elements of COMB to ensure that they are not shared.  */

void
unshare_aff_combination (aff_tree *comb)
{
  unsigned i;

  for (i = 0; i < comb->n; i++)
    comb->elts[i].val = unshare_expr (comb->elts[i].val);
  if (comb->rest)
    comb->rest = unshare_expr (comb->rest);
}
/* Remove M-th element from COMB.  */

void
aff_combination_remove_elt (aff_tree *comb, unsigned m)
{
  comb->n--;
  if (m <= comb->n)
    comb->elts[m] = comb->elts[comb->n];
  if (comb->rest)
    {
      comb->elts[comb->n].coef = 1;
      comb->elts[comb->n].val = comb->rest;
      comb->rest = NULL_TREE;
      comb->n++;
    }
}
/* Adds C * COEF * VAL to R.  VAL may be NULL, in that case only
   C * COEF is added to R.  */

static void
aff_combination_add_product (aff_tree *c, const widest_int &coef, tree val,
                             aff_tree *r)
{
  unsigned i;
  tree aval, type;

  for (i = 0; i < c->n; i++)
    {
      aval = c->elts[i].val;
      if (val)
        {
          type = TREE_TYPE (aval);
          aval = fold_build2 (MULT_EXPR, type, aval,
                              fold_convert (type, val));
        }

      aff_combination_add_elt (r, aval, coef * c->elts[i].coef);
    }

  if (c->rest)
    {
      aval = c->rest;
      if (val)
        {
          type = TREE_TYPE (aval);
          aval = fold_build2 (MULT_EXPR, type, aval,
                              fold_convert (type, val));
        }

      aff_combination_add_elt (r, aval, coef);
    }

  if (val)
    aff_combination_add_elt (r, val, coef * c->offset);
  else
    aff_combination_add_cst (r, coef * c->offset);
}
/* Multiplies C1 by C2, storing the result to R.  */

void
aff_combination_mult (aff_tree *c1, aff_tree *c2, aff_tree *r)
{
  unsigned i;
  gcc_assert (TYPE_PRECISION (c1->type) == TYPE_PRECISION (c2->type));

  aff_combination_zero (r, c1->type);

  for (i = 0; i < c2->n; i++)
    aff_combination_add_product (c1, c2->elts[i].coef, c2->elts[i].val, r);
  if (c2->rest)
    aff_combination_add_product (c1, 1, c2->rest, r);
  aff_combination_add_product (c1, c2->offset, NULL, r);
}
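/* Worked example (editorial): multiplying C1 = { offset = 1; x * 1 } by
   C2 = { offset = 2; y * 1 } yields

     { offset = 2; elts = { (x * y) * 1, y * 1, x * 2 } }

   i.e. (x + 1) * (y + 2) = x*y + y + 2*x + 2, where the mixed product
   x * y becomes a single element built with MULT_EXPR.  */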
/* Returns the element of COMB whose value is VAL, or NULL if no such
   element exists.  If IDX is not NULL, it is set to the index of VAL in
   COMB.  */

static struct aff_comb_elt *
aff_combination_find_elt (aff_tree *comb, tree val, unsigned *idx)
{
  unsigned i;

  for (i = 0; i < comb->n; i++)
    if (operand_equal_p (comb->elts[i].val, val, 0))
      {
        if (idx)
          *idx = i;

        return &comb->elts[i];
      }

  return NULL;
}
/* Element of the cache that maps ssa name NAME to its expanded form
   as an affine expression EXPANSION.  */

struct name_expansion
{
  aff_tree expansion;

  /* True if the expansion for the name is just being generated.  */
  unsigned in_progress : 1;
};
/* Expands SSA names in COMB recursively.  CACHE is used to cache the
   results.  */

void
aff_combination_expand (aff_tree *comb ATTRIBUTE_UNUSED,
                        hash_map<tree, name_expansion *> **cache)
{
  unsigned i;
  aff_tree to_add, current, curre;
  tree e, rhs;
  gimple def;
  widest_int scale;
  struct name_expansion *exp;

  aff_combination_zero (&to_add, comb->type);
  for (i = 0; i < comb->n; i++)
    {
      tree type, name;
      enum tree_code code;

      e = comb->elts[i].val;
      type = TREE_TYPE (e);
      name = e;
      /* Look through some conversions.  */
      if (TREE_CODE (e) == NOP_EXPR
          && (TYPE_PRECISION (type)
              >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (e, 0)))))
        name = TREE_OPERAND (e, 0);
      if (TREE_CODE (name) != SSA_NAME)
        continue;
      def = SSA_NAME_DEF_STMT (name);
      if (!is_gimple_assign (def) || gimple_assign_lhs (def) != name)
        continue;

      code = gimple_assign_rhs_code (def);
      if (code != SSA_NAME
          && !IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
          && (get_gimple_rhs_class (code) != GIMPLE_SINGLE_RHS
              || !is_gimple_min_invariant (gimple_assign_rhs1 (def))))
        continue;

      /* We do not know whether the reference retains its value at the
         place where the expansion is used.  */
      if (TREE_CODE_CLASS (code) == tcc_reference)
        continue;

      if (!*cache)
        *cache = new hash_map<tree, name_expansion *>;
      name_expansion **slot = &(*cache)->get_or_insert (e);
      exp = *slot;

      if (!exp)
        {
          exp = XNEW (struct name_expansion);
          exp->in_progress = 1;
          *slot = exp;
          /* In principle this is a generally valid folding, but
             it is not unconditionally an optimization, so do it
             here and not in fold_unary.  */
          /* Convert (T1)(X *+- CST) into (T1)X *+- (T1)CST if T1 is wider
             than the type of X and overflow for the type of X is
             undefined.  */
          if (e != name
              && INTEGRAL_TYPE_P (type)
              && INTEGRAL_TYPE_P (TREE_TYPE (name))
              && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (name))
              && TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (name))
              && (code == PLUS_EXPR || code == MINUS_EXPR || code == MULT_EXPR)
              && TREE_CODE (gimple_assign_rhs2 (def)) == INTEGER_CST)
            rhs = fold_build2 (code, type,
                               fold_convert (type, gimple_assign_rhs1 (def)),
                               fold_convert (type, gimple_assign_rhs2 (def)));
          else
            {
              rhs = gimple_assign_rhs_to_tree (def);
              if (e != name)
                rhs = fold_convert (type, rhs);
            }
          tree_to_aff_combination_expand (rhs, comb->type, &current, cache);
          exp->expansion = current;
          exp->in_progress = 0;
        }
      else
        {
          /* Since we follow the definitions in the SSA form, we should not
             enter a cycle unless we pass through a phi node.  */
          gcc_assert (!exp->in_progress);
          current = exp->expansion;
        }

      /* Accumulate the new terms to TO_ADD, so that we do not modify
         COMB while traversing it; include the term -coef * E, to remove
         it from COMB.  */
      scale = comb->elts[i].coef;
      aff_combination_zero (&curre, comb->type);
      aff_combination_add_elt (&curre, e, -scale);
      aff_combination_scale (&current, scale);
      aff_combination_add (&to_add, &current);
      aff_combination_add (&to_add, &curre);
    }
  aff_combination_add (comb, &to_add);
}
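/* Editorial sketch: given the statement a1 = a0 + a0, expanding the
   combination { a1 * 1 } parses the definition of a1 into { a0 * 2 },
   then adds both that expansion (scaled by the old coefficient of a1)
   and -1 * a1 to TO_ADD, so the final aff_combination_add replaces a1
   by 2 * a0.  The cache guarantees each name is expanded only once.  */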
/* Similar to tree_to_aff_combination, but follows SSA name definitions
   and expands them recursively.  CACHE is used to cache the expansions
   of the ssa names, to avoid exponential time complexity for cases
   like

   a1 = a0 + a0;
   a2 = a1 + a1;
   a3 = a2 + a2;
   ...  */

void
tree_to_aff_combination_expand (tree expr, tree type, aff_tree *comb,
                                hash_map<tree, name_expansion *> **cache)
{
  tree_to_aff_combination (expr, type, comb);
  aff_combination_expand (comb, cache);
}
/* Frees memory occupied by struct name_expansion in *VALUE.  Callback for
   hash_map::traverse.  */

bool
free_name_expansion (tree const &, name_expansion **value, void *)
{
  free (*value);
  return true;
}
/* Frees memory allocated for the CACHE used by
   tree_to_aff_combination_expand.  */

void
free_affine_expand_cache (hash_map<tree, name_expansion *> **cache)
{
  if (!*cache)
    return;

  (*cache)->traverse<void *, free_name_expansion> (NULL);
  delete (*cache);
  *cache = NULL;
}
/* If VAL != CST * DIV for any constant CST, returns false.
   Otherwise, if *MULT_SET is true, additionally compares CST and MULT,
   and if they are different, returns false.  Finally, if neither of these
   two cases occur, true is returned, and CST is stored to MULT and MULT_SET
   is set to true.  */

static bool
wide_int_constant_multiple_p (const widest_int &val, const widest_int &div,
                              bool *mult_set, widest_int *mult)
{
  widest_int rem, cst;

  if (val == 0)
    {
      if (*mult_set && *mult != 0)
        return false;
      *mult_set = true;
      *mult = 0;
      return true;
    }

  if (div == 0)
    return false;

  if (!wi::multiple_of_p (val, div, SIGNED, &cst))
    return false;

  if (*mult_set && *mult != cst)
    return false;

  *mult_set = true;
  *mult = cst;
  return true;
}
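/* Editorial sketch: with VAL = 12 and DIV = 4, MULT is set to 3; a later
   call on the same MULT with VAL = 8 and DIV = 4 then fails, because it
   would need the common multiplier to be 2.  This is how the per-element
   checks in aff_combination_constant_multiple_p below enforce a single
   constant CST across the whole combination.  */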
/* Returns true if VAL = X * DIV for some constant X.  If this is the case,
   X is stored to MULT.  */

bool
aff_combination_constant_multiple_p (aff_tree *val, aff_tree *div,
                                     widest_int *mult)
{
  bool mult_set = false;
  unsigned i;

  if (val->n == 0 && val->offset == 0)
    {
      *mult = 0;
      return true;
    }
  if (val->n != div->n)
    return false;

  if (val->rest || div->rest)
    return false;

  if (!wide_int_constant_multiple_p (val->offset, div->offset,
                                     &mult_set, mult))
    return false;

  for (i = 0; i < div->n; i++)
    {
      struct aff_comb_elt *elt
        = aff_combination_find_elt (val, div->elts[i].val, NULL);
      if (!elt)
        return false;
      if (!wide_int_constant_multiple_p (elt->coef, div->elts[i].coef,
                                         &mult_set, mult))
        return false;
    }

  gcc_assert (mult_set);
  return true;
}
/* Prints the affine VAL to the FILE.  */

static void
print_aff (FILE *file, aff_tree *val)
{
  unsigned i;
  signop sgn = TYPE_SIGN (val->type);
  if (POINTER_TYPE_P (val->type))
    sgn = SIGNED;
  fprintf (file, "{\n  type = ");
  print_generic_expr (file, val->type, TDF_VOPS|TDF_MEMSYMS);
  fprintf (file, "\n  offset = ");
  print_dec (val->offset, file, sgn);
  if (val->n > 0)
    {
      fprintf (file, "\n  elements = {\n");
      for (i = 0; i < val->n; i++)
        {
          fprintf (file, "    [%d] = ", i);
          print_generic_expr (file, val->elts[i].val, TDF_VOPS|TDF_MEMSYMS);

          fprintf (file, " * ");
          print_dec (val->elts[i].coef, file, sgn);
          if (i != val->n - 1)
            fprintf (file, ", \n");
        }
      fprintf (file, "\n  }");
    }
  if (val->rest)
    {
      fprintf (file, "\n  rest = ");
      print_generic_expr (file, val->rest, TDF_VOPS|TDF_MEMSYMS);
    }
  fprintf (file, "\n}");
}
/* Prints the affine VAL to the standard error, used for debugging.  */

DEBUG_FUNCTION void
debug_aff (aff_tree *val)
{
  print_aff (stderr, val);
  fprintf (stderr, "\n");
}
/* Computes address of the reference REF in ADDR.  The size of the accessed
   location is stored to SIZE.  Returns the ultimate containing object to
   which REF refers.  */

tree
get_inner_reference_aff (tree ref, aff_tree *addr, widest_int *size)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree toff;
  enum machine_mode mode;
  int uns, vol;
  aff_tree tmp;
  tree base = get_inner_reference (ref, &bitsize, &bitpos, &toff, &mode,
                                   &uns, &vol, false);
  tree base_addr = build_fold_addr_expr (base);

  /* ADDR = &BASE + TOFF + BITPOS / BITS_PER_UNIT.  */

  tree_to_aff_combination (base_addr, sizetype, addr);

  if (toff)
    {
      tree_to_aff_combination (toff, sizetype, &tmp);
      aff_combination_add (addr, &tmp);
    }

  aff_combination_const (&tmp, sizetype, bitpos / BITS_PER_UNIT);
  aff_combination_add (addr, &tmp);

  *size = (bitsize + BITS_PER_UNIT - 1) / BITS_PER_UNIT;

  return base;
}
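/* Editorial sketch: for REF = s.f, where field f sits at byte offset 8
   within s, ADDR comes out as the combination { offset = 8; &s * 1 } and
   *SIZE is the byte size of f (bit sizes are rounded up to whole bytes
   above).  */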
/* Returns true if a region of size SIZE1 at position 0 and a region of
   size SIZE2 at position DIFF cannot overlap.  */

bool
aff_comb_cannot_overlap_p (aff_tree *diff, const widest_int &size1,
                           const widest_int &size2)
{
  /* Unless the difference is a constant, we fail.  */
  if (diff->n != 0)
    return false;

  if (wi::neg_p (diff->offset))
    {
      /* The second object is before the first one, we succeed if the last
         element of the second object is before the start of the first one.  */
      return wi::neg_p (diff->offset + size2 - 1);
    }
  else
    {
      /* We succeed if the second object starts after the first one ends.  */
      return wi::les_p (size1, diff->offset);
    }
}
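/* Editorial sketch: with DIFF = { offset = 4 } and SIZE1 = 4, the regions
   [0, 4) and [4, 4 + SIZE2) cannot overlap, so this returns true.  With
   DIFF = { offset = -2 } and SIZE2 = 2, the second region [-2, 0) ends
   exactly where the first one starts, and wi::neg_p (-2 + 2 - 1) likewise
   returns true.  */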