gcc/tree-affine.c
/* Operations with affine combinations of trees.
   Copyright (C) 2005-2017 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pretty-print.h"
#include "fold-const.h"
#include "tree-affine.h"
#include "gimplify.h"
#include "dumpfile.h"
#include "cfgexpand.h"
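/* An affine combination COMB over a type TYPE represents the value

     COMB.offset + COMB.elts[0].coef * COMB.elts[0].val + ...
                 + COMB.elts[COMB.n - 1].coef * COMB.elts[COMB.n - 1].val
                 + COMB.rest,

   where the offset and the coefficients are widest_int constants
   interpreted in the precision of TYPE, the vals are trees, and the
   rest term (non-NULL only once all MAX_AFF_ELTS element slots are
   occupied) is an additional tree summand with an implicit coefficient
   of 1.  The aff_tree and aff_comb_elt structures themselves are
   defined in tree-affine.h.  */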
/* Extends CST as appropriate for an affine combination of type TYPE.  */

widest_int
wide_int_ext_for_comb (const widest_int &cst, tree type)
{
  return wi::sext (cst, TYPE_PRECISION (type));
}
/* Initializes affine combination COMB so that its value is zero in TYPE.  */

static void
aff_combination_zero (aff_tree *comb, tree type)
{
  int i;
  comb->type = type;
  comb->offset = 0;
  comb->n = 0;
  for (i = 0; i < MAX_AFF_ELTS; i++)
    comb->elts[i].coef = 0;
  comb->rest = NULL_TREE;
}
/* Sets COMB to CST.  */

void
aff_combination_const (aff_tree *comb, tree type, const widest_int &cst)
{
  aff_combination_zero (comb, type);
  comb->offset = wide_int_ext_for_comb (cst, comb->type);
}
/* Sets COMB to single element ELT.  */

void
aff_combination_elt (aff_tree *comb, tree type, tree elt)
{
  aff_combination_zero (comb, type);

  comb->n = 1;
  comb->elts[0].val = elt;
  comb->elts[0].coef = 1;
}
/* Scales COMB by SCALE.  */

void
aff_combination_scale (aff_tree *comb, const widest_int &scale_in)
{
  unsigned i, j;

  widest_int scale = wide_int_ext_for_comb (scale_in, comb->type);
  if (scale == 1)
    return;

  if (scale == 0)
    {
      aff_combination_zero (comb, comb->type);
      return;
    }

  comb->offset = wide_int_ext_for_comb (scale * comb->offset, comb->type);
  for (i = 0, j = 0; i < comb->n; i++)
    {
      widest_int new_coef
        = wide_int_ext_for_comb (scale * comb->elts[i].coef, comb->type);
      /* A coefficient may become zero due to overflow.  Remove the zero
         elements.  */
      if (new_coef == 0)
        continue;
      comb->elts[j].coef = new_coef;
      comb->elts[j].val = comb->elts[i].val;
      j++;
    }
  comb->n = j;

  if (comb->rest)
    {
      tree type = comb->type;
      if (POINTER_TYPE_P (type))
        type = sizetype;
      if (comb->n < MAX_AFF_ELTS)
        {
          comb->elts[comb->n].coef = scale;
          comb->elts[comb->n].val = comb->rest;
          comb->rest = NULL_TREE;
          comb->n++;
        }
      else
        comb->rest = fold_build2 (MULT_EXPR, type, comb->rest,
                                  wide_int_to_tree (type, scale));
    }
}
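/* As an illustration of aff_combination_scale: scaling 3 + 2*x by 4 in
   a 32-bit type yields 12 + 8*x.  Since the multiplication is performed
   in the precision of COMB->type, a coefficient can wrap around to
   zero, in which case its element is dropped; e.g. in an 8-bit type,
   scaling the element 64*x by 4 gives a coefficient of 256 == 0
   mod 2^8.  */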
/* Adds ELT * SCALE to COMB.  */

void
aff_combination_add_elt (aff_tree *comb, tree elt, const widest_int &scale_in)
{
  unsigned i;
  tree type;

  widest_int scale = wide_int_ext_for_comb (scale_in, comb->type);
  if (scale == 0)
    return;

  for (i = 0; i < comb->n; i++)
    if (operand_equal_p (comb->elts[i].val, elt, 0))
      {
        widest_int new_coef
          = wide_int_ext_for_comb (comb->elts[i].coef + scale, comb->type);
        if (new_coef != 0)
          {
            comb->elts[i].coef = new_coef;
            return;
          }

        comb->n--;
        comb->elts[i] = comb->elts[comb->n];

        if (comb->rest)
          {
            gcc_assert (comb->n == MAX_AFF_ELTS - 1);
            comb->elts[comb->n].coef = 1;
            comb->elts[comb->n].val = comb->rest;
            comb->rest = NULL_TREE;
            comb->n++;
          }
        return;
      }
  if (comb->n < MAX_AFF_ELTS)
    {
      comb->elts[comb->n].coef = scale;
      comb->elts[comb->n].val = elt;
      comb->n++;
      return;
    }

  type = comb->type;
  if (POINTER_TYPE_P (type))
    type = sizetype;

  if (scale == 1)
    elt = fold_convert (type, elt);
  else
    elt = fold_build2 (MULT_EXPR, type,
                       fold_convert (type, elt),
                       wide_int_to_tree (type, scale));

  if (comb->rest)
    comb->rest = fold_build2 (PLUS_EXPR, type, comb->rest, elt);
  else
    comb->rest = elt;
}
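/* Note that aff_combination_add_elt merges ELT into an existing element
   with an operand_equal_p value: adding 5*x to 2*x + y gives 7*x + y,
   and adding -2*x to 2*x + y cancels the x element entirely.  Once all
   MAX_AFF_ELTS slots are taken, further elements are accumulated into
   COMB->rest as an explicit tree expression.  */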
/* Adds CST to C.  */

static void
aff_combination_add_cst (aff_tree *c, const widest_int &cst)
{
  c->offset = wide_int_ext_for_comb (c->offset + cst, c->type);
}
/* Adds COMB2 to COMB1.  */

void
aff_combination_add (aff_tree *comb1, aff_tree *comb2)
{
  unsigned i;

  aff_combination_add_cst (comb1, comb2->offset);
  for (i = 0; i < comb2->n; i++)
    aff_combination_add_elt (comb1, comb2->elts[i].val, comb2->elts[i].coef);
  if (comb2->rest)
    aff_combination_add_elt (comb1, comb2->rest, 1);
}
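/* For example, aff_combination_add applied to 1 + 2*x and 3 + 5*x
   produces 4 + 7*x: the constant offsets are added directly and like
   elements are merged through aff_combination_add_elt; a rest term of
   COMB2 is added as an element with coefficient 1.  */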
/* Converts affine combination COMB to TYPE.  */

void
aff_combination_convert (aff_tree *comb, tree type)
{
  unsigned i, j;
  tree comb_type = comb->type;

  if (TYPE_PRECISION (type) > TYPE_PRECISION (comb_type))
    {
      tree val = fold_convert (type, aff_combination_to_tree (comb));
      tree_to_aff_combination (val, type, comb);
      return;
    }

  comb->type = type;
  if (comb->rest && !POINTER_TYPE_P (type))
    comb->rest = fold_convert (type, comb->rest);

  if (TYPE_PRECISION (type) == TYPE_PRECISION (comb_type))
    return;

  comb->offset = wide_int_ext_for_comb (comb->offset, comb->type);
  for (i = j = 0; i < comb->n; i++)
    {
      if (comb->elts[i].coef == 0)
        continue;
      comb->elts[j].coef = comb->elts[i].coef;
      comb->elts[j].val = fold_convert (type, comb->elts[i].val);
      j++;
    }

  comb->n = j;
  if (comb->n < MAX_AFF_ELTS && comb->rest)
    {
      comb->elts[comb->n].coef = 1;
      comb->elts[comb->n].val = comb->rest;
      comb->rest = NULL_TREE;
      comb->n++;
    }
}
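/* When aff_combination_convert narrows, the constants are re-extended
   in the new precision, so the combination keeps the same value modulo
   2^precision: e.g. an offset of 200 converted to an 8-bit type becomes
   sext (200, 8) == -56.  */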
/* Splits EXPR into an affine combination of parts.  */

void
tree_to_aff_combination (tree expr, tree type, aff_tree *comb)
{
  aff_tree tmp;
  enum tree_code code;
  tree cst, core, toffset;
  HOST_WIDE_INT bitpos, bitsize;
  machine_mode mode;
  int unsignedp, reversep, volatilep;

  STRIP_NOPS (expr);

  code = TREE_CODE (expr);
  switch (code)
    {
    case INTEGER_CST:
      aff_combination_const (comb, type, wi::to_widest (expr));
      return;

    case POINTER_PLUS_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
      aff_combination_add (comb, &tmp);
      return;

    case PLUS_EXPR:
    case MINUS_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), type, &tmp);
      if (code == MINUS_EXPR)
        aff_combination_scale (&tmp, -1);
      aff_combination_add (comb, &tmp);
      return;

    case MULT_EXPR:
      cst = TREE_OPERAND (expr, 1);
      if (TREE_CODE (cst) != INTEGER_CST)
        break;
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, wi::to_widest (cst));
      return;

    case NEGATE_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, -1);
      return;

    case BIT_NOT_EXPR:
      /* ~x = -x - 1 */
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, -1);
      aff_combination_add_cst (comb, -1);
      return;

    case ADDR_EXPR:
      /* Handle &MEM[ptr + CST] which is equivalent to POINTER_PLUS_EXPR.  */
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == MEM_REF)
        {
          expr = TREE_OPERAND (expr, 0);
          tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
          tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
          aff_combination_add (comb, &tmp);
          return;
        }
      core = get_inner_reference (TREE_OPERAND (expr, 0), &bitsize, &bitpos,
                                  &toffset, &mode, &unsignedp, &reversep,
                                  &volatilep);
      if (bitpos % BITS_PER_UNIT != 0)
        break;
      aff_combination_const (comb, type, bitpos / BITS_PER_UNIT);
      if (TREE_CODE (core) == MEM_REF)
        {
          aff_combination_add_cst (comb, wi::to_widest (TREE_OPERAND (core, 1)));
          core = TREE_OPERAND (core, 0);
        }
      else
        core = build_fold_addr_expr (core);

      if (TREE_CODE (core) == ADDR_EXPR)
        aff_combination_add_elt (comb, core, 1);
      else
        {
          tree_to_aff_combination (core, type, &tmp);
          aff_combination_add (comb, &tmp);
        }
      if (toffset)
        {
          tree_to_aff_combination (toffset, type, &tmp);
          aff_combination_add (comb, &tmp);
        }
      return;

    case MEM_REF:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR)
        tree_to_aff_combination (TREE_OPERAND (TREE_OPERAND (expr, 0), 0),
                                 type, comb);
      else if (integer_zerop (TREE_OPERAND (expr, 1)))
        {
          aff_combination_elt (comb, type, expr);
          return;
        }
      else
        aff_combination_elt (comb, type,
                             build2 (MEM_REF, TREE_TYPE (expr),
                                     TREE_OPERAND (expr, 0),
                                     build_int_cst
                                       (TREE_TYPE (TREE_OPERAND (expr, 1)), 0)));
      tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
      aff_combination_add (comb, &tmp);
      return;

    default:
      break;
    }

  aff_combination_elt (comb, type, expr);
}
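/* A typical use of the routines above is checking whether two addresses
   differ by a compile-time constant.  A sketch (ADDR1 and ADDR2 stand
   for whatever address trees the caller has at hand):

     aff_tree off1, off2;
     tree_to_aff_combination (addr1, ptr_type_node, &off1);
     tree_to_aff_combination (addr2, ptr_type_node, &off2);
     aff_combination_scale (&off2, -1);
     aff_combination_add (&off1, &off2);

   If all symbolic parts cancel, off1.n == 0 and off1.offset is the
   distance between the two addresses.  */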
/* Creates EXPR + ELT * SCALE in TYPE.  EXPR is taken from affine
   combination COMB.  */

static tree
add_elt_to_tree (tree expr, tree type, tree elt, const widest_int &scale_in)
{
  enum tree_code code;
  tree type1 = type;
  if (POINTER_TYPE_P (type))
    type1 = sizetype;

  widest_int scale = wide_int_ext_for_comb (scale_in, type);

  if (scale == -1
      && POINTER_TYPE_P (TREE_TYPE (elt)))
    {
      elt = convert_to_ptrofftype (elt);
      elt = fold_build1 (NEGATE_EXPR, TREE_TYPE (elt), elt);
      scale = 1;
    }

  if (scale == 1)
    {
      if (!expr)
        {
          if (POINTER_TYPE_P (TREE_TYPE (elt)))
            return elt;
          else
            return fold_convert (type1, elt);
        }

      if (POINTER_TYPE_P (TREE_TYPE (expr)))
        return fold_build_pointer_plus (expr, elt);
      if (POINTER_TYPE_P (TREE_TYPE (elt)))
        return fold_build_pointer_plus (elt, expr);
      return fold_build2 (PLUS_EXPR, type1,
                          expr, fold_convert (type1, elt));
    }

  if (scale == -1)
    {
      if (!expr)
        return fold_build1 (NEGATE_EXPR, type1,
                            fold_convert (type1, elt));

      if (POINTER_TYPE_P (TREE_TYPE (expr)))
        {
          elt = convert_to_ptrofftype (elt);
          elt = fold_build1 (NEGATE_EXPR, TREE_TYPE (elt), elt);
          return fold_build_pointer_plus (expr, elt);
        }
      return fold_build2 (MINUS_EXPR, type1,
                          expr, fold_convert (type1, elt));
    }

  elt = fold_convert (type1, elt);
  if (!expr)
    return fold_build2 (MULT_EXPR, type1, elt,
                        wide_int_to_tree (type1, scale));

  if (wi::neg_p (scale))
    {
      code = MINUS_EXPR;
      scale = -scale;
    }
  else
    code = PLUS_EXPR;

  elt = fold_build2 (MULT_EXPR, type1, elt,
                     wide_int_to_tree (type1, scale));
  if (POINTER_TYPE_P (TREE_TYPE (expr)))
    {
      if (code == MINUS_EXPR)
        elt = fold_build1 (NEGATE_EXPR, type1, elt);
      return fold_build_pointer_plus (expr, elt);
    }
  return fold_build2 (code, type1, expr, elt);
}
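/* The reason add_elt_to_tree computes in TYPE1 == sizetype whenever
   TYPE is a pointer type is that POINTER_PLUS_EXPR, built here through
   fold_build_pointer_plus, requires its offset operand to be expressed
   in sizetype rather than in the pointer type itself.  */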
/* Makes tree from the affine combination COMB.  */

tree
aff_combination_to_tree (aff_tree *comb)
{
  tree type = comb->type;
  tree expr = NULL_TREE;
  unsigned i;
  widest_int off, sgn;
  tree type1 = type;
  if (POINTER_TYPE_P (type))
    type1 = sizetype;

  gcc_assert (comb->n == MAX_AFF_ELTS || comb->rest == NULL_TREE);

  for (i = 0; i < comb->n; i++)
    expr = add_elt_to_tree (expr, type, comb->elts[i].val, comb->elts[i].coef);

  if (comb->rest)
    expr = add_elt_to_tree (expr, type, comb->rest, 1);

  /* Ensure that we get x - 1, not x + (-1) or x + 0xff..f if x is
     unsigned.  */
  if (wi::neg_p (comb->offset))
    {
      off = -comb->offset;
      sgn = -1;
    }
  else
    {
      off = comb->offset;
      sgn = 1;
    }
  return add_elt_to_tree (expr, type, wide_int_to_tree (type1, off), sgn);
}
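/* For instance, aff_combination_to_tree turns the combination with
   offset -1 and single element 1*x into the tree x - 1: the offset is
   emitted via add_elt_to_tree with value 1 and scale -1, producing a
   MINUS_EXPR instead of an addition of the all-ones constant.  */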
/* Copies the tree elements of COMB to ensure that they are not shared.  */

void
unshare_aff_combination (aff_tree *comb)
{
  unsigned i;

  for (i = 0; i < comb->n; i++)
    comb->elts[i].val = unshare_expr (comb->elts[i].val);
  if (comb->rest)
    comb->rest = unshare_expr (comb->rest);
}
/* Removes the M-th element from COMB.  */

void
aff_combination_remove_elt (aff_tree *comb, unsigned m)
{
  comb->n--;
  if (m <= comb->n)
    comb->elts[m] = comb->elts[comb->n];
  if (comb->rest)
    {
      comb->elts[comb->n].coef = 1;
      comb->elts[comb->n].val = comb->rest;
      comb->rest = NULL_TREE;
      comb->n++;
    }
}
/* Adds C * COEF * VAL to R.  VAL may be NULL, in that case only
   C * COEF is added to R.  */

static void
aff_combination_add_product (aff_tree *c, const widest_int &coef, tree val,
                             aff_tree *r)
{
  unsigned i;
  tree aval, type;

  for (i = 0; i < c->n; i++)
    {
      aval = c->elts[i].val;
      if (val)
        {
          type = TREE_TYPE (aval);
          aval = fold_build2 (MULT_EXPR, type, aval,
                              fold_convert (type, val));
        }

      aff_combination_add_elt (r, aval, coef * c->elts[i].coef);
    }

  if (c->rest)
    {
      aval = c->rest;
      if (val)
        {
          type = TREE_TYPE (aval);
          aval = fold_build2 (MULT_EXPR, type, aval,
                              fold_convert (type, val));
        }

      aff_combination_add_elt (r, aval, coef);
    }

  if (val)
    aff_combination_add_elt (r, val, coef * c->offset);
  else
    aff_combination_add_cst (r, coef * c->offset);
}
/* Multiplies C1 by C2, storing the result to R.  */

void
aff_combination_mult (aff_tree *c1, aff_tree *c2, aff_tree *r)
{
  unsigned i;
  gcc_assert (TYPE_PRECISION (c1->type) == TYPE_PRECISION (c2->type));

  aff_combination_zero (r, c1->type);

  for (i = 0; i < c2->n; i++)
    aff_combination_add_product (c1, c2->elts[i].coef, c2->elts[i].val, r);
  if (c2->rest)
    aff_combination_add_product (c1, 1, c2->rest, r);
  aff_combination_add_product (c1, c2->offset, NULL, r);
}
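/* For example, aff_combination_mult distributes term by term:
   multiplying 1 + x by 2 + y yields 2 + 2*x + y + x*y, where the x*y
   element is built as an explicit MULT_EXPR tree by
   aff_combination_add_product.  */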
/* Returns the element of COMB whose value is VAL, or NULL if no such
   element exists.  If IDX is not NULL, it is set to the index of VAL in
   COMB.  */

static struct aff_comb_elt *
aff_combination_find_elt (aff_tree *comb, tree val, unsigned *idx)
{
  unsigned i;

  for (i = 0; i < comb->n; i++)
    if (operand_equal_p (comb->elts[i].val, val, 0))
      {
        if (idx)
          *idx = i;

        return &comb->elts[i];
      }

  return NULL;
}
/* Element of the cache that maps ssa name NAME to its expanded form
   as an affine expression EXPANSION.  */

struct name_expansion
{
  aff_tree expansion;

  /* True if the expansion for the name is just being generated.  */
  unsigned in_progress : 1;
};
/* Expands SSA names in COMB recursively.  CACHE is used to cache the
   results.  */

void
aff_combination_expand (aff_tree *comb ATTRIBUTE_UNUSED,
                        hash_map<tree, name_expansion *> **cache)
{
  unsigned i;
  aff_tree to_add, current, curre;
  tree e, rhs;
  gimple *def;
  widest_int scale;
  struct name_expansion *exp;

  aff_combination_zero (&to_add, comb->type);
  for (i = 0; i < comb->n; i++)
    {
      tree type, name;
      enum tree_code code;

      e = comb->elts[i].val;
      type = TREE_TYPE (e);
      name = e;
      /* Look through some conversions.  */
      if (CONVERT_EXPR_P (e)
          && (TYPE_PRECISION (type)
              >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (e, 0)))))
        name = TREE_OPERAND (e, 0);
      if (TREE_CODE (name) != SSA_NAME)
        continue;
      def = SSA_NAME_DEF_STMT (name);
      if (!is_gimple_assign (def) || gimple_assign_lhs (def) != name)
        continue;

      code = gimple_assign_rhs_code (def);
      if (code != SSA_NAME
          && !IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
          && (get_gimple_rhs_class (code) != GIMPLE_SINGLE_RHS
              || !is_gimple_min_invariant (gimple_assign_rhs1 (def))))
        continue;

      /* We do not know whether the reference retains its value at the
         place where the expansion is used.  */
      if (TREE_CODE_CLASS (code) == tcc_reference)
        continue;

      if (!*cache)
        *cache = new hash_map<tree, name_expansion *>;
      name_expansion **slot = &(*cache)->get_or_insert (e);
      exp = *slot;

      if (!exp)
        {
          exp = XNEW (struct name_expansion);
          exp->in_progress = 1;
          *slot = exp;
          /* In principle this is a generally valid folding, but
             it is not unconditionally an optimization, so do it
             here and not in fold_unary.  */
          /* Convert (T1)(X *+- CST) into (T1)X *+- (T1)CST if T1 is wider
             than the type of X and overflow for the type of X is
             undefined.  */
          if (e != name
              && INTEGRAL_TYPE_P (type)
              && INTEGRAL_TYPE_P (TREE_TYPE (name))
              && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (name))
              && TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (name))
              && (code == PLUS_EXPR || code == MINUS_EXPR || code == MULT_EXPR)
              && TREE_CODE (gimple_assign_rhs2 (def)) == INTEGER_CST)
            rhs = fold_build2 (code, type,
                               fold_convert (type, gimple_assign_rhs1 (def)),
                               fold_convert (type, gimple_assign_rhs2 (def)));
          else
            {
              rhs = gimple_assign_rhs_to_tree (def);
              if (e != name)
                rhs = fold_convert (type, rhs);
            }
          tree_to_aff_combination_expand (rhs, comb->type, &current, cache);
          exp->expansion = current;
          exp->in_progress = 0;
        }
      else
        {
          /* Since we follow the definitions in the SSA form, we should not
             enter a cycle unless we pass through a phi node.  */
          gcc_assert (!exp->in_progress);
          current = exp->expansion;
        }

      /* Accumulate the new terms to TO_ADD, so that we do not modify
         COMB while traversing it; include the term -coef * E, to remove
         it from COMB.  */
      scale = comb->elts[i].coef;
      aff_combination_zero (&curre, comb->type);
      aff_combination_add_elt (&curre, e, -scale);
      aff_combination_scale (&current, scale);
      aff_combination_add (&to_add, &current);
      aff_combination_add (&to_add, &curre);
    }
  aff_combination_add (comb, &to_add);
}
/* Similar to tree_to_aff_combination, but follows SSA name definitions
   and expands them recursively.  CACHE is used to cache the expansions
   of the ssa names, to avoid exponential time complexity for cases
   like

   a1 = a0 + a0;
   a2 = a1 + a1;
   a3 = a2 + a2;
   ...  */

void
tree_to_aff_combination_expand (tree expr, tree type, aff_tree *comb,
                                hash_map<tree, name_expansion *> **cache)
{
  tree_to_aff_combination (expr, type, comb);
  aff_combination_expand (comb, cache);
}
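/* A sketch of the intended usage of the expanding variant, with the
   cache shared across queries and released once all of them are done:

     hash_map<tree, name_expansion *> *cache = NULL;
     aff_tree comb;

     tree_to_aff_combination_expand (expr1, type, &comb, &cache);
     ...
     tree_to_aff_combination_expand (expr2, type, &comb, &cache);
     ...
     free_affine_expand_cache (&cache);  */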
/* Frees memory occupied by struct name_expansion in *VALUE.  Callback for
   hash_map::traverse.  */

bool
free_name_expansion (tree const &, name_expansion **value, void *)
{
  free (*value);
  return true;
}
/* Frees memory allocated for the CACHE used by
   tree_to_aff_combination_expand.  */

void
free_affine_expand_cache (hash_map<tree, name_expansion *> **cache)
{
  if (!*cache)
    return;

  (*cache)->traverse<void *, free_name_expansion> (NULL);
  delete (*cache);
  *cache = NULL;
}
/* If VAL != CST * DIV for any constant CST, returns false.
   Otherwise, if *MULT_SET is true, additionally compares CST and MULT,
   and if they are different, returns false.  Finally, if neither of these
   two cases occur, true is returned, and CST is stored to MULT and MULT_SET
   is set to true.  */

static bool
wide_int_constant_multiple_p (const widest_int &val, const widest_int &div,
                              bool *mult_set, widest_int *mult)
{
  widest_int rem, cst;

  if (val == 0)
    {
      if (*mult_set && *mult != 0)
        return false;
      *mult_set = true;
      *mult = 0;
      return true;
    }

  if (div == 0)
    return false;

  if (!wi::multiple_of_p (val, div, SIGNED, &cst))
    return false;

  if (*mult_set && *mult != cst)
    return false;

  *mult_set = true;
  *mult = cst;
  return true;
}
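/* For instance, with VAL == 12 and DIV == 4, the function records the
   multiplier 3; a subsequent call with VAL == 15 and DIV == 5 succeeds
   because 15 == 3 * 5 matches the recorded multiplier, while VAL == 8,
   DIV == 4 would fail since 2 != 3.  */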
/* Returns true if VAL = X * DIV for some constant X.  If this is the case,
   X is stored to MULT.  */

bool
aff_combination_constant_multiple_p (aff_tree *val, aff_tree *div,
                                     widest_int *mult)
{
  bool mult_set = false;
  unsigned i;

  if (val->n == 0 && val->offset == 0)
    {
      *mult = 0;
      return true;
    }

  if (val->n != div->n)
    return false;

  if (val->rest || div->rest)
    return false;

  if (!wide_int_constant_multiple_p (val->offset, div->offset,
                                     &mult_set, mult))
    return false;

  for (i = 0; i < div->n; i++)
    {
      struct aff_comb_elt *elt
        = aff_combination_find_elt (val, div->elts[i].val, NULL);
      if (!elt)
        return false;
      if (!wide_int_constant_multiple_p (elt->coef, div->elts[i].coef,
                                         &mult_set, mult))
        return false;
    }

  gcc_assert (mult_set);
  return true;
}
/* Prints the affine VAL to the FILE.  */

static void
print_aff (FILE *file, aff_tree *val)
{
  unsigned i;
  signop sgn = TYPE_SIGN (val->type);
  if (POINTER_TYPE_P (val->type))
    sgn = SIGNED;
  fprintf (file, "{\n  type = ");
  print_generic_expr (file, val->type, TDF_VOPS|TDF_MEMSYMS);
  fprintf (file, "\n  offset = ");
  print_dec (val->offset, file, sgn);
  if (val->n > 0)
    {
      fprintf (file, "\n  elements = {\n");
      for (i = 0; i < val->n; i++)
        {
          fprintf (file, "    [%d] = ", i);
          print_generic_expr (file, val->elts[i].val, TDF_VOPS|TDF_MEMSYMS);

          fprintf (file, " * ");
          print_dec (val->elts[i].coef, file, sgn);
          if (i != val->n - 1)
            fprintf (file, ", \n");
        }
      fprintf (file, "\n  }");
    }
  if (val->rest)
    {
      fprintf (file, "\n  rest = ");
      print_generic_expr (file, val->rest, TDF_VOPS|TDF_MEMSYMS);
    }
  fprintf (file, "\n}");
}
/* Prints the affine VAL to the standard error, used for debugging.  */

DEBUG_FUNCTION void
debug_aff (aff_tree *val)
{
  print_aff (stderr, val);
  fprintf (stderr, "\n");
}
/* Computes address of the reference REF in ADDR.  The size of the accessed
   location is stored to SIZE.  Returns the ultimate containing object to
   which REF refers.  */

tree
get_inner_reference_aff (tree ref, aff_tree *addr, widest_int *size)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree toff;
  machine_mode mode;
  int uns, rev, vol;
  aff_tree tmp;
  tree base = get_inner_reference (ref, &bitsize, &bitpos, &toff, &mode,
                                   &uns, &rev, &vol);
  tree base_addr = build_fold_addr_expr (base);

  /* ADDR = &BASE + TOFF + BITPOS / BITS_PER_UNIT.  */

  tree_to_aff_combination (base_addr, sizetype, addr);

  if (toff)
    {
      tree_to_aff_combination (toff, sizetype, &tmp);
      aff_combination_add (addr, &tmp);
    }

  aff_combination_const (&tmp, sizetype, bitpos / BITS_PER_UNIT);
  aff_combination_add (addr, &tmp);

  *size = (bitsize + BITS_PER_UNIT - 1) / BITS_PER_UNIT;

  return base;
}
/* Returns true if a region of size SIZE1 at position 0 and a region of
   size SIZE2 at position DIFF cannot overlap.  */

bool
aff_comb_cannot_overlap_p (aff_tree *diff, const widest_int &size1,
                           const widest_int &size2)
{
  /* Unless the difference is a constant, we fail.  */
  if (diff->n != 0)
    return false;

  if (wi::neg_p (diff->offset))
    {
      /* The second object is before the first one, we succeed if the last
         element of the second object is before the start of the first one.  */
      return wi::neg_p (diff->offset + size2 - 1);
    }
  else
    {
      /* We succeed if the second object starts after the first one ends.  */
      return size1 <= diff->offset;
    }
}
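/* For example, with SIZE1 == SIZE2 == 4, regions at relative offsets 0
   and 4 cannot overlap (4 <= 4), and neither can regions at offsets 0
   and -4 (-4 + 4 - 1 < 0); with an offset of 3 the regions [0, 3] and
   [3, 6] do overlap, and the function returns false.  */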