/* Operations with affine combinations of trees.
   Copyright (C) 2005-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "hash-set.h"
#include "machmode.h"
#include "vec.h"
#include "double-int.h"
#include "input.h"
#include "alias.h"
#include "symtab.h"
#include "options.h"
#include "wide-int.h"
#include "inchash.h"
#include "tree.h"
#include "fold-const.h"
#include "hashtab.h"
#include "tm.h"
#include "hard-reg-set.h"
#include "function.h"
#include "rtl.h"
#include "flags.h"
#include "statistics.h"
#include "real.h"
#include "fixed-value.h"
#include "insn-config.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "emit-rtl.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "tree-pretty-print.h"
#include "tree-affine.h"
#include "predict.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimplify.h"
#include "dumpfile.h"
#include "cfgexpand.h"
#include "wide-int-print.h"
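
/* Informally, an aff_tree (declared in tree-affine.h) denotes a value as
   the affine combination

     COMB = offset + elts[0].coef * elts[0].val + ...
		   + elts[n - 1].coef * elts[n - 1].val + rest

   where OFFSET and the coefficients are integer constants interpreted in
   the precision of COMB's type, the VALs are arbitrary trees, and REST,
   if non-NULL, collects with an implicit coefficient of one whatever did
   not fit into the MAX_AFF_ELTS entries of ELTS.  As an illustration, the
   address &a[i + 1] for an array of 4-byte elements corresponds to the
   combination { offset = 4; elts = { &a * 1, i * 4 } }.  */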

/* Extends CST as appropriate for the affine combination COMB.  */

widest_int
wide_int_ext_for_comb (const widest_int &cst, aff_tree *comb)
{
  return wi::sext (cst, TYPE_PRECISION (comb->type));
}

/* Initializes affine combination COMB so that its value is zero in TYPE.  */

static void
aff_combination_zero (aff_tree *comb, tree type)
{
  int i;
  comb->type = type;
  comb->offset = 0;
  comb->n = 0;
  for (i = 0; i < MAX_AFF_ELTS; i++)
    comb->elts[i].coef = 0;
  comb->rest = NULL_TREE;
}

/* Sets COMB to CST.  */

void
aff_combination_const (aff_tree *comb, tree type, const widest_int &cst)
{
  aff_combination_zero (comb, type);
  comb->offset = wide_int_ext_for_comb (cst, comb);
}

/* Sets COMB to single element ELT.  */

void
aff_combination_elt (aff_tree *comb, tree type, tree elt)
{
  aff_combination_zero (comb, type);

  comb->n = 1;
  comb->elts[0].val = elt;
  comb->elts[0].coef = 1;
}

/* Scales COMB by SCALE.  */

void
aff_combination_scale (aff_tree *comb, const widest_int &scale_in)
{
  unsigned i, j;

  widest_int scale = wide_int_ext_for_comb (scale_in, comb);
  if (scale == 1)
    return;

  if (scale == 0)
    {
      aff_combination_zero (comb, comb->type);
      return;
    }

  comb->offset = wide_int_ext_for_comb (scale * comb->offset, comb);
  for (i = 0, j = 0; i < comb->n; i++)
    {
      widest_int new_coef
	= wide_int_ext_for_comb (scale * comb->elts[i].coef, comb);
      /* A coefficient may become zero due to overflow.  Remove the zero
	 elements.  */
      if (new_coef == 0)
	continue;
      comb->elts[j].coef = new_coef;
      comb->elts[j].val = comb->elts[i].val;
      j++;
    }
  comb->n = j;

  if (comb->rest)
    {
      tree type = comb->type;
      if (POINTER_TYPE_P (type))
	type = sizetype;
      if (comb->n < MAX_AFF_ELTS)
	{
	  comb->elts[comb->n].coef = scale;
	  comb->elts[comb->n].val = comb->rest;
	  comb->rest = NULL_TREE;
	  comb->n++;
	}
      else
	comb->rest = fold_build2 (MULT_EXPR, type, comb->rest,
				  wide_int_to_tree (type, scale));
    }
}
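
/* For instance, scaling { offset = 1; elts = { x * 2 } } by 3 in a wide
   enough type yields { offset = 3; elts = { x * 6 } }.  In a narrow type
   the multiplication is performed modulo 2^precision, so a coefficient can
   wrap around to zero, in which case its element is dropped as above.  */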

/* Adds ELT * SCALE to COMB.  */

void
aff_combination_add_elt (aff_tree *comb, tree elt, const widest_int &scale_in)
{
  unsigned i;
  tree type;

  widest_int scale = wide_int_ext_for_comb (scale_in, comb);
  if (scale == 0)
    return;

  for (i = 0; i < comb->n; i++)
    if (operand_equal_p (comb->elts[i].val, elt, 0))
      {
	widest_int new_coef
	  = wide_int_ext_for_comb (comb->elts[i].coef + scale, comb);
	if (new_coef != 0)
	  {
	    comb->elts[i].coef = new_coef;
	    return;
	  }

	comb->n--;
	comb->elts[i] = comb->elts[comb->n];

	if (comb->rest)
	  {
	    gcc_assert (comb->n == MAX_AFF_ELTS - 1);
	    comb->elts[comb->n].coef = 1;
	    comb->elts[comb->n].val = comb->rest;
	    comb->rest = NULL_TREE;
	    comb->n++;
	  }
	return;
      }
  if (comb->n < MAX_AFF_ELTS)
    {
      comb->elts[comb->n].coef = scale;
      comb->elts[comb->n].val = elt;
      comb->n++;
      return;
    }

  type = comb->type;
  if (POINTER_TYPE_P (type))
    type = sizetype;

  if (scale == 1)
    elt = fold_convert (type, elt);
  else
    elt = fold_build2 (MULT_EXPR, type,
		       fold_convert (type, elt),
		       wide_int_to_tree (type, scale));

  if (comb->rest)
    comb->rest = fold_build2 (PLUS_EXPR, type, comb->rest,
			      elt);
  else
    comb->rest = elt;
}
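
/* For instance, adding x * 2 to a combination that already contains x * 3
   merges the terms into x * 5, while adding x * -3 to x * 3 cancels the
   element entirely; only when ELT is new and ELTS is already full does the
   term spill into REST as a folded tree.  */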

/* Adds CST to C.  */

static void
aff_combination_add_cst (aff_tree *c, const widest_int &cst)
{
  c->offset = wide_int_ext_for_comb (c->offset + cst, c);
}

/* Adds COMB2 to COMB1.  */

void
aff_combination_add (aff_tree *comb1, aff_tree *comb2)
{
  unsigned i;

  aff_combination_add_cst (comb1, comb2->offset);
  for (i = 0; i < comb2->n; i++)
    aff_combination_add_elt (comb1, comb2->elts[i].val, comb2->elts[i].coef);
  if (comb2->rest)
    aff_combination_add_elt (comb1, comb2->rest, 1);
}

/* Converts affine combination COMB to TYPE.  */

void
aff_combination_convert (aff_tree *comb, tree type)
{
  unsigned i, j;
  tree comb_type = comb->type;

  if (TYPE_PRECISION (type) > TYPE_PRECISION (comb_type))
    {
      tree val = fold_convert (type, aff_combination_to_tree (comb));
      tree_to_aff_combination (val, type, comb);
      return;
    }

  comb->type = type;
  if (comb->rest && !POINTER_TYPE_P (type))
    comb->rest = fold_convert (type, comb->rest);

  if (TYPE_PRECISION (type) == TYPE_PRECISION (comb_type))
    return;

  comb->offset = wide_int_ext_for_comb (comb->offset, comb);
  for (i = j = 0; i < comb->n; i++)
    {
      if (comb->elts[i].coef == 0)
	continue;
      comb->elts[j].coef = comb->elts[i].coef;
      comb->elts[j].val = fold_convert (type, comb->elts[i].val);
      j++;
    }

  comb->n = j;
  if (comb->n < MAX_AFF_ELTS && comb->rest)
    {
      comb->elts[comb->n].coef = 1;
      comb->elts[comb->n].val = comb->rest;
      comb->rest = NULL_TREE;
      comb->n++;
    }
}
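
/* Note the asymmetry above: narrowing only re-extends the constants to the
   new precision, but widening has to round-trip through a tree, since the
   element values were computed modulo the narrower precision and extending
   the coefficients alone would not in general be valid.  */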

/* Splits EXPR into an affine combination of parts.  */

void
tree_to_aff_combination (tree expr, tree type, aff_tree *comb)
{
  aff_tree tmp;
  enum tree_code code;
  tree cst, core, toffset;
  HOST_WIDE_INT bitpos, bitsize;
  machine_mode mode;
  int unsignedp, volatilep;

  STRIP_NOPS (expr);

  code = TREE_CODE (expr);
  switch (code)
    {
    case INTEGER_CST:
      aff_combination_const (comb, type, wi::to_widest (expr));
      return;

    case POINTER_PLUS_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
      aff_combination_add (comb, &tmp);
      return;

    case PLUS_EXPR:
    case MINUS_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), type, &tmp);
      if (code == MINUS_EXPR)
	aff_combination_scale (&tmp, -1);
      aff_combination_add (comb, &tmp);
      return;

    case MULT_EXPR:
      cst = TREE_OPERAND (expr, 1);
      if (TREE_CODE (cst) != INTEGER_CST)
	break;
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, wi::to_widest (cst));
      return;

    case NEGATE_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, -1);
      return;

    case BIT_NOT_EXPR:
      /* ~x = -x - 1 */
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, -1);
      aff_combination_add_cst (comb, -1);
      return;

    case ADDR_EXPR:
      /* Handle &MEM[ptr + CST] which is equivalent to POINTER_PLUS_EXPR.  */
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == MEM_REF)
	{
	  expr = TREE_OPERAND (expr, 0);
	  tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
	  tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
	  aff_combination_add (comb, &tmp);
	  return;
	}
      core = get_inner_reference (TREE_OPERAND (expr, 0), &bitsize, &bitpos,
				  &toffset, &mode, &unsignedp, &volatilep,
				  false);
      if (bitpos % BITS_PER_UNIT != 0)
	break;
      aff_combination_const (comb, type, bitpos / BITS_PER_UNIT);
      if (TREE_CODE (core) == MEM_REF)
	{
	  aff_combination_add_cst (comb, wi::to_widest (TREE_OPERAND (core, 1)));
	  core = TREE_OPERAND (core, 0);
	}
      else
	core = build_fold_addr_expr (core);

      if (TREE_CODE (core) == ADDR_EXPR)
	aff_combination_add_elt (comb, core, 1);
      else
	{
	  tree_to_aff_combination (core, type, &tmp);
	  aff_combination_add (comb, &tmp);
	}
      if (toffset)
	{
	  tree_to_aff_combination (toffset, type, &tmp);
	  aff_combination_add (comb, &tmp);
	}
      return;

    case MEM_REF:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR)
	tree_to_aff_combination (TREE_OPERAND (TREE_OPERAND (expr, 0), 0),
				 type, comb);
      else if (integer_zerop (TREE_OPERAND (expr, 1)))
	{
	  aff_combination_elt (comb, type, expr);
	  return;
	}
      else
	aff_combination_elt (comb, type,
			     build2 (MEM_REF, TREE_TYPE (expr),
				     TREE_OPERAND (expr, 0),
				     build_int_cst
				       (TREE_TYPE (TREE_OPERAND (expr, 1)), 0)));
      tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
      aff_combination_add (comb, &tmp);
      return;

    default:
      break;
    }

  aff_combination_elt (comb, type, expr);
}
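
/* For example, for a pointer p and a sizetype index i, the expression
   p + (i + 1) * 8 decomposes into { offset = 8; elts = { p * 1, i * 8 } };
   any subexpression that cannot be decomposed further (the default case
   above) becomes a single element with coefficient one.  */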

/* Creates EXPR + ELT * SCALE in TYPE.  EXPR is taken from affine
   combination COMB.  */

static tree
add_elt_to_tree (tree expr, tree type, tree elt, const widest_int &scale_in,
		 aff_tree *comb ATTRIBUTE_UNUSED)
{
  enum tree_code code;
  tree type1 = type;
  if (POINTER_TYPE_P (type))
    type1 = sizetype;

  widest_int scale = wide_int_ext_for_comb (scale_in, comb);

  if (scale == -1
      && POINTER_TYPE_P (TREE_TYPE (elt)))
    {
      elt = convert_to_ptrofftype (elt);
      elt = fold_build1 (NEGATE_EXPR, TREE_TYPE (elt), elt);
      scale = 1;
    }

  if (scale == 1)
    {
      if (!expr)
	{
	  if (POINTER_TYPE_P (TREE_TYPE (elt)))
	    return elt;
	  else
	    return fold_convert (type1, elt);
	}

      if (POINTER_TYPE_P (TREE_TYPE (expr)))
	return fold_build_pointer_plus (expr, elt);
      if (POINTER_TYPE_P (TREE_TYPE (elt)))
	return fold_build_pointer_plus (elt, expr);
      return fold_build2 (PLUS_EXPR, type1,
			  expr, fold_convert (type1, elt));
    }

  if (scale == -1)
    {
      if (!expr)
	return fold_build1 (NEGATE_EXPR, type1,
			    fold_convert (type1, elt));

      if (POINTER_TYPE_P (TREE_TYPE (expr)))
	{
	  elt = convert_to_ptrofftype (elt);
	  elt = fold_build1 (NEGATE_EXPR, TREE_TYPE (elt), elt);
	  return fold_build_pointer_plus (expr, elt);
	}
      return fold_build2 (MINUS_EXPR, type1,
			  expr, fold_convert (type1, elt));
    }

  elt = fold_convert (type1, elt);
  if (!expr)
    return fold_build2 (MULT_EXPR, type1, elt,
			wide_int_to_tree (type1, scale));

  if (wi::neg_p (scale))
    {
      code = MINUS_EXPR;
      scale = -scale;
    }
  else
    code = PLUS_EXPR;

  elt = fold_build2 (MULT_EXPR, type1, elt,
		     wide_int_to_tree (type1, scale));
  if (POINTER_TYPE_P (TREE_TYPE (expr)))
    {
      if (code == MINUS_EXPR)
	elt = fold_build1 (NEGATE_EXPR, type1, elt);
      return fold_build_pointer_plus (expr, elt);
    }
  return fold_build2 (code, type1, expr, elt);
}
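
/* Note that whenever EXPR or ELT has pointer type, the sum is built with
   fold_build_pointer_plus, so the result remains a POINTER_PLUS_EXPR with
   a sizetype offset rather than a plain PLUS_EXPR on pointers.  */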

/* Makes tree from the affine combination COMB.  */

tree
aff_combination_to_tree (aff_tree *comb)
{
  tree type = comb->type;
  tree expr = NULL_TREE;
  unsigned i;
  widest_int off, sgn;
  tree type1 = type;
  if (POINTER_TYPE_P (type))
    type1 = sizetype;

  gcc_assert (comb->n == MAX_AFF_ELTS || comb->rest == NULL_TREE);

  for (i = 0; i < comb->n; i++)
    expr = add_elt_to_tree (expr, type, comb->elts[i].val, comb->elts[i].coef,
			    comb);

  if (comb->rest)
    expr = add_elt_to_tree (expr, type, comb->rest, 1, comb);

  /* Ensure that we get x - 1, not x + (-1) or x + 0xff..f if x is
     unsigned.  */
  if (wi::neg_p (comb->offset))
    {
      off = -comb->offset;
      sgn = -1;
    }
  else
    {
      off = comb->offset;
      sgn = 1;
    }
  return add_elt_to_tree (expr, type, wide_int_to_tree (type1, off), sgn,
			  comb);
}
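
/* This is the inverse of tree_to_aff_combination: converting an expression
   to a combination and back yields an equivalent, though possibly
   re-associated and re-folded, tree.  Feeding the offset through
   add_elt_to_tree with a sign of +-1 is what produces the more readable
   x - 1 instead of x + 0xff..f for negative offsets.  */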

/* Copies the tree elements of COMB to ensure that they are not shared.  */

void
unshare_aff_combination (aff_tree *comb)
{
  unsigned i;

  for (i = 0; i < comb->n; i++)
    comb->elts[i].val = unshare_expr (comb->elts[i].val);
  if (comb->rest)
    comb->rest = unshare_expr (comb->rest);
}

/* Removes the M-th element from COMB.  */

void
aff_combination_remove_elt (aff_tree *comb, unsigned m)
{
  comb->n--;
  if (m <= comb->n)
    comb->elts[m] = comb->elts[comb->n];
  if (comb->rest)
    {
      comb->elts[comb->n].coef = 1;
      comb->elts[comb->n].val = comb->rest;
      comb->rest = NULL_TREE;
      comb->n++;
    }
}

/* Adds C * COEF * VAL to R.  VAL may be NULL, in which case only
   C * COEF is added to R.  */

static void
aff_combination_add_product (aff_tree *c, const widest_int &coef, tree val,
			     aff_tree *r)
{
  unsigned i;
  tree aval, type;

  for (i = 0; i < c->n; i++)
    {
      aval = c->elts[i].val;
      if (val)
	{
	  type = TREE_TYPE (aval);
	  aval = fold_build2 (MULT_EXPR, type, aval,
			      fold_convert (type, val));
	}

      aff_combination_add_elt (r, aval, coef * c->elts[i].coef);
    }

  if (c->rest)
    {
      aval = c->rest;
      if (val)
	{
	  type = TREE_TYPE (aval);
	  aval = fold_build2 (MULT_EXPR, type, aval,
			      fold_convert (type, val));
	}

      aff_combination_add_elt (r, aval, coef);
    }

  if (val)
    aff_combination_add_elt (r, val, coef * c->offset);
  else
    aff_combination_add_cst (r, coef * c->offset);
}

/* Multiplies C1 by C2, storing the result to R.  */

void
aff_combination_mult (aff_tree *c1, aff_tree *c2, aff_tree *r)
{
  unsigned i;
  gcc_assert (TYPE_PRECISION (c1->type) == TYPE_PRECISION (c2->type));

  aff_combination_zero (r, c1->type);

  for (i = 0; i < c2->n; i++)
    aff_combination_add_product (c1, c2->elts[i].coef, c2->elts[i].val, r);
  if (c2->rest)
    aff_combination_add_product (c1, 1, c2->rest, r);
  aff_combination_add_product (c1, c2->offset, NULL, r);
}
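
/* For example, multiplying { offset = 2; elts = { x * 1 } } by
   { offset = 3; elts = { y * 1 } } yields
   { offset = 6; elts = { x * y * 1, y * 2, x * 3 } }, where the x * y
   element holds the folded tree x * y with coefficient one.  */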

/* Returns the element of COMB whose value is VAL, or NULL if no such
   element exists.  If IDX is not NULL, it is set to the index of VAL in
   COMB.  */

static struct aff_comb_elt *
aff_combination_find_elt (aff_tree *comb, tree val, unsigned *idx)
{
  unsigned i;

  for (i = 0; i < comb->n; i++)
    if (operand_equal_p (comb->elts[i].val, val, 0))
      {
	if (idx)
	  *idx = i;

	return &comb->elts[i];
      }

  return NULL;
}

/* Element of the cache that maps ssa name NAME to its expanded form
   as an affine expression EXPANSION.  */

struct name_expansion
{
  aff_tree expansion;

  /* True if the expansion for the name is just being generated.  */
  unsigned in_progress : 1;
};

/* Expands SSA names in COMB recursively.  CACHE is used to cache the
   results.  */

void
aff_combination_expand (aff_tree *comb ATTRIBUTE_UNUSED,
			hash_map<tree, name_expansion *> **cache)
{
  unsigned i;
  aff_tree to_add, current, curre;
  tree e, rhs;
  gimple def;
  widest_int scale;
  struct name_expansion *exp;

  aff_combination_zero (&to_add, comb->type);
  for (i = 0; i < comb->n; i++)
    {
      tree type, name;
      enum tree_code code;

      e = comb->elts[i].val;
      type = TREE_TYPE (e);
      name = e;
      /* Look through some conversions.  */
      if (CONVERT_EXPR_P (e)
	  && (TYPE_PRECISION (type)
	      >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (e, 0)))))
	name = TREE_OPERAND (e, 0);
      if (TREE_CODE (name) != SSA_NAME)
	continue;
      def = SSA_NAME_DEF_STMT (name);
      if (!is_gimple_assign (def) || gimple_assign_lhs (def) != name)
	continue;

      code = gimple_assign_rhs_code (def);
      if (code != SSA_NAME
	  && !IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
	  && (get_gimple_rhs_class (code) != GIMPLE_SINGLE_RHS
	      || !is_gimple_min_invariant (gimple_assign_rhs1 (def))))
	continue;

      /* We do not know whether the reference retains its value at the
	 place where the expansion is used.  */
      if (TREE_CODE_CLASS (code) == tcc_reference)
	continue;

      if (!*cache)
	*cache = new hash_map<tree, name_expansion *>;
      name_expansion **slot = &(*cache)->get_or_insert (e);
      exp = *slot;

      if (!exp)
	{
	  exp = XNEW (struct name_expansion);
	  exp->in_progress = 1;
	  *slot = exp;
	  /* In principle this is a generally valid folding, but
	     it is not unconditionally an optimization, so do it
	     here and not in fold_unary.  */
	  /* Convert (T1)(X *+- CST) into (T1)X *+- (T1)CST if T1 is wider
	     than the type of X and overflow for the type of X is
	     undefined.  */
	  if (e != name
	      && INTEGRAL_TYPE_P (type)
	      && INTEGRAL_TYPE_P (TREE_TYPE (name))
	      && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (name))
	      && TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (name))
	      && (code == PLUS_EXPR || code == MINUS_EXPR || code == MULT_EXPR)
	      && TREE_CODE (gimple_assign_rhs2 (def)) == INTEGER_CST)
	    rhs = fold_build2 (code, type,
			       fold_convert (type, gimple_assign_rhs1 (def)),
			       fold_convert (type, gimple_assign_rhs2 (def)));
	  else
	    {
	      rhs = gimple_assign_rhs_to_tree (def);
	      if (e != name)
		rhs = fold_convert (type, rhs);
	    }
	  tree_to_aff_combination_expand (rhs, comb->type, &current, cache);
	  exp->expansion = current;
	  exp->in_progress = 0;
	}
      else
	{
	  /* Since we follow the definitions in the SSA form, we should not
	     enter a cycle unless we pass through a phi node.  */
	  gcc_assert (!exp->in_progress);
	  current = exp->expansion;
	}

      /* Accumulate the new terms to TO_ADD, so that we do not modify
	 COMB while traversing it; include the term -coef * E, to remove
	 it from COMB.  */
      scale = comb->elts[i].coef;
      aff_combination_zero (&curre, comb->type);
      aff_combination_add_elt (&curre, e, -scale);
      aff_combination_scale (&current, scale);
      aff_combination_add (&to_add, &current);
      aff_combination_add (&to_add, &curre);
    }
  aff_combination_add (comb, &to_add);
}
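
/* For instance, if COMB is { offset = 0; elts = { a1 * 1 } } and a1 is
   defined by a1 = a0 + a0, the expansion replaces the a1 element by a0 * 2.
   Definitions are looked up through the cache, so each name is expanded at
   most once no matter how often it appears.  */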

/* Similar to tree_to_aff_combination, but follows SSA name definitions
   and expands them recursively.  CACHE is used to cache the expansions
   of the ssa names, to avoid exponential time complexity for cases
   like

   a1 = a0 + a0;
   a2 = a1 + a1;
   a3 = a2 + a2;
   ...  */

void
tree_to_aff_combination_expand (tree expr, tree type, aff_tree *comb,
				hash_map<tree, name_expansion *> **cache)
{
  tree_to_aff_combination (expr, type, comb);
  aff_combination_expand (comb, cache);
}

/* Frees memory occupied by struct name_expansion in *VALUE.  Callback for
   hash_map::traverse.  */

bool
free_name_expansion (tree const &, name_expansion **value, void *)
{
  free (*value);
  return true;
}

/* Frees memory allocated for the CACHE used by
   tree_to_aff_combination_expand.  */

void
free_affine_expand_cache (hash_map<tree, name_expansion *> **cache)
{
  if (!*cache)
    return;

  (*cache)->traverse<void *, free_name_expansion> (NULL);
  delete (*cache);
  *cache = NULL;
}
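
/* A typical usage pattern of the expansion cache by a caller therefore
   looks like, e.g.,

     hash_map<tree, name_expansion *> *cache = NULL;
     aff_tree comb;

     tree_to_aff_combination_expand (expr, type, &comb, &cache);
     ...further calls sharing CACHE...
     free_affine_expand_cache (&cache);

   CACHE must be initialized to NULL before the first call; it is created
   lazily on first use and may legitimately still be NULL when freed.  */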

/* If VAL != CST * DIV for any constant CST, returns false.
   Otherwise, if *MULT_SET is true, additionally compares CST and MULT,
   and if they are different, returns false.  Finally, if neither of these
   two cases occurs, true is returned, and CST is stored to MULT and
   MULT_SET is set to true.  */

static bool
wide_int_constant_multiple_p (const widest_int &val, const widest_int &div,
			      bool *mult_set, widest_int *mult)
{
  widest_int rem, cst;

  if (val == 0)
    {
      if (*mult_set && *mult != 0)
	return false;
      *mult_set = true;
      *mult = 0;
      return true;
    }

  if (div == 0)
    return false;

  if (!wi::multiple_of_p (val, div, SIGNED, &cst))
    return false;

  if (*mult_set && *mult != cst)
    return false;

  *mult_set = true;
  *mult = cst;
  return true;
}

/* Returns true if VAL = X * DIV for some constant X.  If this is the case,
   X is stored to MULT.  */

bool
aff_combination_constant_multiple_p (aff_tree *val, aff_tree *div,
				     widest_int *mult)
{
  bool mult_set = false;
  unsigned i;

  if (val->n == 0 && val->offset == 0)
    {
      *mult = 0;
      return true;
    }

  if (val->n != div->n)
    return false;

  if (val->rest || div->rest)
    return false;

  if (!wide_int_constant_multiple_p (val->offset, div->offset,
				     &mult_set, mult))
    return false;

  for (i = 0; i < div->n; i++)
    {
      struct aff_comb_elt *elt
	= aff_combination_find_elt (val, div->elts[i].val, NULL);
      if (!elt)
	return false;
      if (!wide_int_constant_multiple_p (elt->coef, div->elts[i].coef,
					 &mult_set, mult))
	return false;
    }

  gcc_assert (mult_set);
  return true;
}
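
/* For example, VAL = { offset = 8; elts = { x * 4 } } is a constant
   multiple of DIV = { offset = 2; elts = { x * 1 } } with MULT = 4; the
   first mismatching ratio, or a term present on only one side, makes the
   predicate fail.  */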

/* Prints the affine VAL to the FILE.  */

static void
print_aff (FILE *file, aff_tree *val)
{
  unsigned i;
  signop sgn = TYPE_SIGN (val->type);
  if (POINTER_TYPE_P (val->type))
    sgn = SIGNED;
  fprintf (file, "{\n  type = ");
  print_generic_expr (file, val->type, TDF_VOPS|TDF_MEMSYMS);
  fprintf (file, "\n  offset = ");
  print_dec (val->offset, file, sgn);
  if (val->n > 0)
    {
      fprintf (file, "\n  elements = {\n");
      for (i = 0; i < val->n; i++)
	{
	  fprintf (file, "    [%d] = ", i);
	  print_generic_expr (file, val->elts[i].val, TDF_VOPS|TDF_MEMSYMS);

	  fprintf (file, " * ");
	  print_dec (val->elts[i].coef, file, sgn);
	  if (i != val->n - 1)
	    fprintf (file, ", \n");
	}
      fprintf (file, "\n  }");
    }
  if (val->rest)
    {
      fprintf (file, "\n  rest = ");
      print_generic_expr (file, val->rest, TDF_VOPS|TDF_MEMSYMS);
    }
  fprintf (file, "\n}");
}

/* Prints the affine VAL to the standard error, used for debugging.  */

DEBUG_FUNCTION void
debug_aff (aff_tree *val)
{
  print_aff (stderr, val);
  fprintf (stderr, "\n");
}

/* Computes the address of the reference REF in ADDR.  The size of the
   accessed location is stored to SIZE.  Returns the ultimate containing
   object to which REF refers.  */

tree
get_inner_reference_aff (tree ref, aff_tree *addr, widest_int *size)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree toff;
  machine_mode mode;
  int uns, vol;
  aff_tree tmp;
  tree base = get_inner_reference (ref, &bitsize, &bitpos, &toff, &mode,
				   &uns, &vol, false);
  tree base_addr = build_fold_addr_expr (base);

  /* ADDR = &BASE + TOFF + BITPOS / BITS_PER_UNIT.  */

  tree_to_aff_combination (base_addr, sizetype, addr);

  if (toff)
    {
      tree_to_aff_combination (toff, sizetype, &tmp);
      aff_combination_add (addr, &tmp);
    }

  aff_combination_const (&tmp, sizetype, bitpos / BITS_PER_UNIT);
  aff_combination_add (addr, &tmp);

  *size = (bitsize + BITS_PER_UNIT - 1) / BITS_PER_UNIT;

  return base;
}
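
/* For instance, for a reference a.f to a field F at byte offset 12 with a
   4-byte type, BASE is a, ADDR becomes &a + 12, and *SIZE becomes 4.  */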

/* Returns true if a region of size SIZE1 at position 0 and a region of
   size SIZE2 at position DIFF cannot overlap.  */

bool
aff_comb_cannot_overlap_p (aff_tree *diff, const widest_int &size1,
			   const widest_int &size2)
{
  /* Unless the difference is a constant, we fail.  */
  if (diff->n != 0)
    return false;

  if (wi::neg_p (diff->offset))
    {
      /* The second object is before the first one, we succeed if the last
	 element of the second object is before the start of the first one.  */
      return wi::neg_p (diff->offset + size2 - 1);
    }
  else
    {
      /* We succeed if the second object starts after the first one ends.  */
      return wi::les_p (size1, diff->offset);
    }
}
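
/* For instance, with DIFF = { offset = 16 }, SIZE1 = 8 and SIZE2 = 8, the
   regions [0, 8) and [16, 24) cannot overlap, whereas DIFF = { offset = 4 }
   with the same sizes can ([0, 8) and [4, 12) intersect).  */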