/* Operations with affine combinations of trees.
   Copyright (C) 2005-2021 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "ssa.h"
#include "tree-pretty-print.h"
#include "fold-const.h"
#include "tree-affine.h"
#include "gimplify.h"
#include "dumpfile.h"
#include "cfgexpand.h"
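/* An aff_tree (see tree-affine.h) represents a value of the form

     offset + coef_1 * val_1 + ... + coef_n * val_n + rest

   where OFFSET and the coefficients COEF_i are compile-time constants,
   the VAL_i are arbitrary trees, and REST collects any terms that do
   not fit into the fixed-size array of MAX_AFF_ELTS elements.  */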
/* Extends CST as appropriate for an affine combination with type TYPE.  */

static widest_int
wide_int_ext_for_comb (const widest_int &cst, tree type)
{
  return wi::sext (cst, TYPE_PRECISION (type));
}
/* Likewise for polynomial offsets.  */

static poly_widest_int
wide_int_ext_for_comb (const poly_widest_int &cst, tree type)
{
  return wi::sext (cst, TYPE_PRECISION (type));
}
/* Initializes affine combination COMB so that its value is zero in TYPE.  */

static void
aff_combination_zero (aff_tree *comb, tree type)
{
  int i;
  comb->type = type;
  comb->offset = 0;
  comb->n = 0;
  for (i = 0; i < MAX_AFF_ELTS; i++)
    comb->elts[i].coef = 0;
  comb->rest = NULL_TREE;
}
/* Sets COMB to CST.  */

void
aff_combination_const (aff_tree *comb, tree type, const poly_widest_int &cst)
{
  aff_combination_zero (comb, type);
  comb->offset = wide_int_ext_for_comb (cst, comb->type);
}
/* Sets COMB to single element ELT.  */

void
aff_combination_elt (aff_tree *comb, tree type, tree elt)
{
  aff_combination_zero (comb, type);

  comb->n = 1;
  comb->elts[0].val = elt;
  comb->elts[0].coef = 1;
}
/* Scales COMB by SCALE.  */

void
aff_combination_scale (aff_tree *comb, const widest_int &scale_in)
{
  unsigned i, j;

  widest_int scale = wide_int_ext_for_comb (scale_in, comb->type);
  if (scale == 1)
    return;

  if (scale == 0)
    {
      aff_combination_zero (comb, comb->type);
      return;
    }

  comb->offset = wide_int_ext_for_comb (scale * comb->offset, comb->type);
  for (i = 0, j = 0; i < comb->n; i++)
    {
      widest_int new_coef
	= wide_int_ext_for_comb (scale * comb->elts[i].coef, comb->type);
      /* A coefficient may become zero due to overflow.  Remove the zero
	 elements.  */
      if (new_coef == 0)
	continue;
      comb->elts[j].coef = new_coef;
      comb->elts[j].val = comb->elts[i].val;
      j++;
    }
  comb->n = j;

  if (comb->rest)
    {
      tree type = comb->type;
      if (POINTER_TYPE_P (type))
	type = sizetype;
      if (comb->n < MAX_AFF_ELTS)
	{
	  comb->elts[comb->n].coef = scale;
	  comb->elts[comb->n].val = comb->rest;
	  comb->rest = NULL_TREE;
	  comb->n++;
	}
      else
	comb->rest = fold_build2 (MULT_EXPR, type, comb->rest,
				  wide_int_to_tree (type, scale));
    }
}
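/* For instance, scaling the combination 4 + 2 * x by 3 yields
   12 + 6 * x; an element whose scaled coefficient wraps around to zero
   in COMB->type is dropped by aff_combination_scale.  */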
/* Adds ELT * SCALE to COMB.  */

void
aff_combination_add_elt (aff_tree *comb, tree elt, const widest_int &scale_in)
{
  unsigned i;
  tree type;

  widest_int scale = wide_int_ext_for_comb (scale_in, comb->type);
  if (scale == 0)
    return;

  for (i = 0; i < comb->n; i++)
    if (operand_equal_p (comb->elts[i].val, elt, 0))
      {
	widest_int new_coef
	  = wide_int_ext_for_comb (comb->elts[i].coef + scale, comb->type);
	if (new_coef != 0)
	  {
	    comb->elts[i].coef = new_coef;
	    return;
	  }

	comb->n--;
	comb->elts[i] = comb->elts[comb->n];

	if (comb->rest)
	  {
	    gcc_assert (comb->n == MAX_AFF_ELTS - 1);
	    comb->elts[comb->n].coef = 1;
	    comb->elts[comb->n].val = comb->rest;
	    comb->rest = NULL_TREE;
	    comb->n++;
	  }
	return;
      }
  if (comb->n < MAX_AFF_ELTS)
    {
      comb->elts[comb->n].coef = scale;
      comb->elts[comb->n].val = elt;
      comb->n++;
      return;
    }

  type = comb->type;
  if (POINTER_TYPE_P (type))
    type = sizetype;

  if (scale == 1)
    elt = fold_convert (type, elt);
  else
    elt = fold_build2 (MULT_EXPR, type,
		       fold_convert (type, elt),
		       wide_int_to_tree (type, scale));

  if (comb->rest)
    comb->rest = fold_build2 (PLUS_EXPR, type, comb->rest,
			      elt);
  else
    comb->rest = elt;
}
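/* For instance, adding x * 3 to 4 + 2 * x merges the coefficients into
   4 + 5 * x; once all MAX_AFF_ELTS element slots are in use,
   aff_combination_add_elt folds further terms into COMB->rest as an
   ordinary tree.  */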
/* Adds CST to C.  */

static void
aff_combination_add_cst (aff_tree *c, const poly_widest_int &cst)
{
  c->offset = wide_int_ext_for_comb (c->offset + cst, c->type);
}
/* Adds COMB2 to COMB1.  */

void
aff_combination_add (aff_tree *comb1, aff_tree *comb2)
{
  unsigned i;

  aff_combination_add_cst (comb1, comb2->offset);
  for (i = 0; i < comb2->n; i++)
    aff_combination_add_elt (comb1, comb2->elts[i].val, comb2->elts[i].coef);
  if (comb2->rest)
    aff_combination_add_elt (comb1, comb2->rest, 1);
}
/* Converts affine combination COMB to TYPE.  */

void
aff_combination_convert (aff_tree *comb, tree type)
{
  unsigned i, j;
  tree comb_type = comb->type;

  if (TYPE_PRECISION (type) > TYPE_PRECISION (comb_type))
    {
      tree val = fold_convert (type, aff_combination_to_tree (comb));
      tree_to_aff_combination (val, type, comb);
      return;
    }

  comb->type = type;
  if (comb->rest && !POINTER_TYPE_P (type))
    comb->rest = fold_convert (type, comb->rest);

  if (TYPE_PRECISION (type) == TYPE_PRECISION (comb_type))
    return;

  comb->offset = wide_int_ext_for_comb (comb->offset, comb->type);
  for (i = j = 0; i < comb->n; i++)
    {
      if (comb->elts[i].coef == 0)
	continue;
      comb->elts[j].coef = comb->elts[i].coef;
      comb->elts[j].val = fold_convert (type, comb->elts[i].val);
      j++;
    }

  comb->n = j;
  if (comb->n < MAX_AFF_ELTS && comb->rest)
    {
      comb->elts[comb->n].coef = 1;
      comb->elts[comb->n].val = comb->rest;
      comb->rest = NULL_TREE;
      comb->n++;
    }
}
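/* Note that aff_combination_convert can narrow in place by re-extending
   the constants, but widening has to rebuild the tree and reparse it,
   since a widening cast of x + y does not in general distribute over
   the addition.  */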
/* Tries to handle OP0 CODE OP1 as affine combination of parts.  Returns
   true when that was successful and returns the combination in COMB.  */

static bool
expr_to_aff_combination (aff_tree *comb, tree_code code, tree type,
			 tree op0, tree op1 = NULL_TREE)
{
  aff_tree tmp;
  poly_int64 bitpos, bitsize, bytepos;

  switch (code)
    {
    case POINTER_PLUS_EXPR:
      tree_to_aff_combination (op0, type, comb);
      tree_to_aff_combination (op1, sizetype, &tmp);
      aff_combination_add (comb, &tmp);
      return true;

    case PLUS_EXPR:
    case MINUS_EXPR:
      tree_to_aff_combination (op0, type, comb);
      tree_to_aff_combination (op1, type, &tmp);
      if (code == MINUS_EXPR)
	aff_combination_scale (&tmp, -1);
      aff_combination_add (comb, &tmp);
      return true;

    case MULT_EXPR:
      if (TREE_CODE (op1) != INTEGER_CST)
	break;
      tree_to_aff_combination (op0, type, comb);
      aff_combination_scale (comb, wi::to_widest (op1));
      return true;

    case NEGATE_EXPR:
      tree_to_aff_combination (op0, type, comb);
      aff_combination_scale (comb, -1);
      return true;

    case BIT_NOT_EXPR:
      /* ~x = -x - 1 */
      tree_to_aff_combination (op0, type, comb);
      aff_combination_scale (comb, -1);
      aff_combination_add_cst (comb, -1);
      return true;

    CASE_CONVERT:
      {
	tree otype = type;
	tree inner = op0;
	tree itype = TREE_TYPE (inner);
	enum tree_code icode = TREE_CODE (inner);

	/* STRIP_NOPS  */
	if (tree_nop_conversion_p (otype, itype))
	  {
	    tree_to_aff_combination (op0, type, comb);
	    return true;
	  }

	/* In principle this is a valid folding, but it isn't necessarily
	   an optimization, so do it here and not in fold_unary.  */
	if ((icode == PLUS_EXPR || icode == MINUS_EXPR || icode == MULT_EXPR)
	    && TREE_CODE (itype) == INTEGER_TYPE
	    && TREE_CODE (otype) == INTEGER_TYPE
	    && TYPE_PRECISION (otype) > TYPE_PRECISION (itype))
	  {
	    tree op0 = TREE_OPERAND (inner, 0), op1 = TREE_OPERAND (inner, 1);

	    /* If the inner type has undefined overflow behavior, fold the
	       conversion for the two cases below:
	       (T1)(X *+- CST) -> (T1)X *+- (T1)CST
	       (T1)(X + X)     -> (T1)X + (T1)X.  */
	    if (TYPE_OVERFLOW_UNDEFINED (itype)
		&& (TREE_CODE (op1) == INTEGER_CST
		    || (icode == PLUS_EXPR && operand_equal_p (op0, op1, 0))))
	      {
		op0 = fold_convert (otype, op0);
		op1 = fold_convert (otype, op1);
		return expr_to_aff_combination (comb, icode, otype, op0, op1);
	      }
	    wide_int minv, maxv;
	    /* If the inner type has wrapping overflow behavior, fold the
	       conversion for the case below:
	       (T1)(X *+- CST) -> (T1)X *+- (T1)CST
	       if range information shows that X *+- CST does not
	       overflow.  */
	    if (TYPE_UNSIGNED (itype)
		&& TYPE_OVERFLOW_WRAPS (itype)
		&& TREE_CODE (op1) == INTEGER_CST
		&& determine_value_range (op0, &minv, &maxv) == VR_RANGE)
	      {
		wi::overflow_type overflow = wi::OVF_NONE;
		signop sign = UNSIGNED;
		if (icode == PLUS_EXPR)
		  wi::add (maxv, wi::to_wide (op1), sign, &overflow);
		else if (icode == MULT_EXPR)
		  wi::mul (maxv, wi::to_wide (op1), sign, &overflow);
		else
		  wi::sub (minv, wi::to_wide (op1), sign, &overflow);

		if (overflow == wi::OVF_NONE)
		  {
		    op0 = fold_convert (otype, op0);
		    op1 = fold_convert (otype, op1);
		    return expr_to_aff_combination (comb, icode, otype, op0,
						    op1);
		  }
	      }
	  }
      }
      break;

    default:;
    }

  return false;
}
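/* For instance, expr_to_aff_combination represents ~x as -1 + (-1) * x
   and x * 5 - y as 5 * x + (-1) * y; a MULT_EXPR by a non-constant is
   not affine and makes the function return false.  */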
/* Splits EXPR into an affine combination of parts.  */

void
tree_to_aff_combination (tree expr, tree type, aff_tree *comb)
{
  aff_tree tmp;
  enum tree_code code;
  tree core, toffset;
  poly_int64 bitpos, bitsize, bytepos;
  machine_mode mode;
  int unsignedp, reversep, volatilep;

  STRIP_NOPS (expr);

  code = TREE_CODE (expr);
  switch (code)
    {
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
      if (expr_to_aff_combination (comb, code, type, TREE_OPERAND (expr, 0),
				   TREE_OPERAND (expr, 1)))
	return;
      break;

    case NEGATE_EXPR:
    case BIT_NOT_EXPR:
      if (expr_to_aff_combination (comb, code, type, TREE_OPERAND (expr, 0)))
	return;
      break;

    CASE_CONVERT:
      /* ??? TREE_TYPE (expr) should be equal to TYPE here, but IVOPTS
	 sometimes calls this without exposing the outer widening cast.  */
      if (expr_to_aff_combination (comb, code,
				   TREE_TYPE (expr), TREE_OPERAND (expr, 0)))
	{
	  aff_combination_convert (comb, type);
	  return;
	}
      break;

    case ADDR_EXPR:
      /* Handle &MEM[ptr + CST] which is equivalent to POINTER_PLUS_EXPR.  */
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == MEM_REF)
	{
	  expr = TREE_OPERAND (expr, 0);
	  tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
	  tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
	  aff_combination_add (comb, &tmp);
	  return;
	}
      core = get_inner_reference (TREE_OPERAND (expr, 0), &bitsize, &bitpos,
				  &toffset, &mode, &unsignedp, &reversep,
				  &volatilep);
      if (!multiple_p (bitpos, BITS_PER_UNIT, &bytepos))
	break;
      aff_combination_const (comb, type, bytepos);
      if (TREE_CODE (core) == MEM_REF)
	{
	  tree mem_offset = TREE_OPERAND (core, 1);
	  aff_combination_add_cst (comb, wi::to_poly_widest (mem_offset));
	  core = TREE_OPERAND (core, 0);
	}
      else
	core = build_fold_addr_expr (core);

      if (TREE_CODE (core) == ADDR_EXPR)
	aff_combination_add_elt (comb, core, 1);
      else
	{
	  tree_to_aff_combination (core, type, &tmp);
	  aff_combination_add (comb, &tmp);
	}
      if (toffset)
	{
	  tree_to_aff_combination (toffset, type, &tmp);
	  aff_combination_add (comb, &tmp);
	}
      return;

    default:
      {
	if (poly_int_tree_p (expr))
	  {
	    aff_combination_const (comb, type, wi::to_poly_widest (expr));
	    return;
	  }
	break;
      }
    }

  aff_combination_elt (comb, type, expr);
}
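/* For instance, given "int a[]", tree_to_aff_combination decomposes the
   address &a[i + 3] into roughly &a + 4 * i + 12, splitting the constant
   byte offset off via get_inner_reference and recursing on the variable
   parts.  */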
/* Creates EXPR + ELT * SCALE in TYPE.  EXPR is taken from affine
   combination COMB.  */

static tree
add_elt_to_tree (tree expr, tree type, tree elt, const widest_int &scale_in)
{
  enum tree_code code;

  widest_int scale = wide_int_ext_for_comb (scale_in, type);

  elt = fold_convert (type, elt);
  if (scale == 1)
    {
      if (!expr)
	return elt;

      return fold_build2 (PLUS_EXPR, type, expr, elt);
    }

  if (scale == -1)
    {
      if (!expr)
	return fold_build1 (NEGATE_EXPR, type, elt);

      return fold_build2 (MINUS_EXPR, type, expr, elt);
    }

  if (!expr)
    return fold_build2 (MULT_EXPR, type, elt, wide_int_to_tree (type, scale));

  if (wi::neg_p (scale))
    {
      code = MINUS_EXPR;
      scale = -scale;
    }
  else
    code = PLUS_EXPR;

  elt = fold_build2 (MULT_EXPR, type, elt, wide_int_to_tree (type, scale));
  return fold_build2 (code, type, expr, elt);
}
/* Makes tree from the affine combination COMB.  */

tree
aff_combination_to_tree (aff_tree *comb)
{
  tree type = comb->type, base = NULL_TREE, expr = NULL_TREE;
  unsigned i;
  poly_widest_int off;
  int sgn;

  gcc_assert (comb->n == MAX_AFF_ELTS || comb->rest == NULL_TREE);

  i = 0;
  if (POINTER_TYPE_P (type))
    {
      type = sizetype;
      if (comb->n > 0 && comb->elts[0].coef == 1
	  && POINTER_TYPE_P (TREE_TYPE (comb->elts[0].val)))
	{
	  base = comb->elts[0].val;
	  ++i;
	}
    }

  for (; i < comb->n; i++)
    expr = add_elt_to_tree (expr, type, comb->elts[i].val, comb->elts[i].coef);

  if (comb->rest)
    expr = add_elt_to_tree (expr, type, comb->rest, 1);

  /* Ensure that we get x - 1, not x + (-1) or x + 0xff..f if x is
     unsigned.  */
  if (known_lt (comb->offset, 0))
    {
      off = -comb->offset;
      sgn = -1;
    }
  else
    {
      off = comb->offset;
      sgn = 1;
    }
  expr = add_elt_to_tree (expr, type, wide_int_to_tree (type, off), sgn);

  if (base)
    return fold_build_pointer_plus (base, expr);
  else
    return fold_convert (comb->type, expr);
}
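/* If the combination has pointer type and its first element is an
   unscaled pointer, aff_combination_to_tree uses that element as the
   base of a POINTER_PLUS_EXPR and emits the remaining terms as the
   sizetype offset.  */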
/* Copies the tree elements of COMB to ensure that they are not shared.  */

void
unshare_aff_combination (aff_tree *comb)
{
  unsigned i;

  for (i = 0; i < comb->n; i++)
    comb->elts[i].val = unshare_expr (comb->elts[i].val);
  if (comb->rest)
    comb->rest = unshare_expr (comb->rest);
}
/* Removes the M-th element from COMB.  */

void
aff_combination_remove_elt (aff_tree *comb, unsigned m)
{
  comb->n--;
  if (m <= comb->n)
    comb->elts[m] = comb->elts[comb->n];
  if (comb->rest)
    {
      comb->elts[comb->n].coef = 1;
      comb->elts[comb->n].val = comb->rest;
      comb->rest = NULL_TREE;
      comb->n++;
    }
}
/* Adds C * COEF * VAL to R.  VAL may be NULL, in that case only
   C * COEF is added to R.  */

static void
aff_combination_add_product (aff_tree *c, const widest_int &coef, tree val,
			     aff_tree *r)
{
  unsigned i;
  tree aval, type;

  for (i = 0; i < c->n; i++)
    {
      aval = c->elts[i].val;
      if (val)
	{
	  type = TREE_TYPE (aval);
	  aval = fold_build2 (MULT_EXPR, type, aval,
			      fold_convert (type, val));
	}

      aff_combination_add_elt (r, aval, coef * c->elts[i].coef);
    }

  if (c->rest)
    {
      aval = c->rest;
      if (val)
	{
	  type = TREE_TYPE (aval);
	  aval = fold_build2 (MULT_EXPR, type, aval,
			      fold_convert (type, val));
	}

      aff_combination_add_elt (r, aval, coef);
    }

  if (val)
    {
      if (c->offset.is_constant ())
	/* Access coeffs[0] directly, for efficiency.  */
	aff_combination_add_elt (r, val, coef * c->offset.coeffs[0]);
      else
	{
	  /* c->offset is polynomial, so multiply VAL rather than COEF
	     by it.  */
	  tree offset = wide_int_to_tree (TREE_TYPE (val), c->offset);
	  val = fold_build2 (MULT_EXPR, TREE_TYPE (val), val, offset);
	  aff_combination_add_elt (r, val, coef);
	}
    }
  else
    aff_combination_add_cst (r, coef * c->offset);
}
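/* For instance, with C = 2 + 3 * x, COEF = 2 and VAL = y,
   aff_combination_add_product adds 6 * (x * y) + 4 * y to R,
   distributing the product over the elements of C.  */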
/* Multiplies C1 by C2, storing the result to R.  */

void
aff_combination_mult (aff_tree *c1, aff_tree *c2, aff_tree *r)
{
  unsigned i;
  gcc_assert (TYPE_PRECISION (c1->type) == TYPE_PRECISION (c2->type));

  aff_combination_zero (r, c1->type);

  for (i = 0; i < c2->n; i++)
    aff_combination_add_product (c1, c2->elts[i].coef, c2->elts[i].val, r);
  if (c2->rest)
    aff_combination_add_product (c1, 1, c2->rest, r);
  if (c2->offset.is_constant ())
    /* Access coeffs[0] directly, for efficiency.  */
    aff_combination_add_product (c1, c2->offset.coeffs[0], NULL, r);
  else
    {
      /* c2->offset is polynomial, so do the multiplication in tree form.  */
      tree offset = wide_int_to_tree (c2->type, c2->offset);
      aff_combination_add_product (c1, 1, offset, r);
    }
}
/* Returns the element of COMB whose value is VAL, or NULL if no such
   element exists.  If IDX is not NULL, it is set to the index of VAL in
   COMB.  */

static class aff_comb_elt *
aff_combination_find_elt (aff_tree *comb, tree val, unsigned *idx)
{
  unsigned i;

  for (i = 0; i < comb->n; i++)
    if (operand_equal_p (comb->elts[i].val, val, 0))
      {
	if (idx)
	  *idx = i;

	return &comb->elts[i];
      }

  return NULL;
}
/* Element of the cache that maps ssa name NAME to its expanded form
   as an affine expression EXPANSION.  */

class name_expansion
{
public:
  aff_tree expansion;

  /* True if the expansion for the name is just being generated.  */
  unsigned in_progress : 1;
};
/* Expands SSA names in COMB recursively.  CACHE is used to cache the
   results.  */

void
aff_combination_expand (aff_tree *comb ATTRIBUTE_UNUSED,
			hash_map<tree, name_expansion *> **cache)
{
  unsigned i;
  aff_tree to_add, current, curre;
  tree e;
  gimple *def;
  widest_int scale;
  class name_expansion *exp;

  aff_combination_zero (&to_add, comb->type);
  for (i = 0; i < comb->n; i++)
    {
      tree type, name;
      enum tree_code code;

      e = comb->elts[i].val;
      type = TREE_TYPE (e);
      name = e;
      /* Look through some conversions.  */
      if (CONVERT_EXPR_P (e)
	  && (TYPE_PRECISION (type)
	      >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (e, 0)))))
	name = TREE_OPERAND (e, 0);
      if (TREE_CODE (name) != SSA_NAME)
	continue;
      def = SSA_NAME_DEF_STMT (name);
      if (!is_gimple_assign (def) || gimple_assign_lhs (def) != name)
	continue;

      code = gimple_assign_rhs_code (def);
      if (code != SSA_NAME
	  && !IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
	  && (get_gimple_rhs_class (code) != GIMPLE_SINGLE_RHS
	      || !is_gimple_min_invariant (gimple_assign_rhs1 (def))))
	continue;

      /* We do not know whether the reference retains its value at the
	 place where the expansion is used.  */
      if (TREE_CODE_CLASS (code) == tcc_reference)
	continue;

      name_expansion **slot = NULL;
      if (*cache)
	slot = (*cache)->get (name);
      exp = slot ? *slot : NULL;
      if (!exp)
	{
	  /* Only bother to handle cases tree_to_aff_combination will.  */
	  switch (code)
	    {
	    case POINTER_PLUS_EXPR:
	    case PLUS_EXPR:
	    case MINUS_EXPR:
	    case MULT_EXPR:
	      if (!expr_to_aff_combination (&current, code, TREE_TYPE (name),
					    gimple_assign_rhs1 (def),
					    gimple_assign_rhs2 (def)))
		continue;
	      break;
	    case NEGATE_EXPR:
	    case BIT_NOT_EXPR:
	      if (!expr_to_aff_combination (&current, code, TREE_TYPE (name),
					    gimple_assign_rhs1 (def)))
		continue;
	      break;
	    CASE_CONVERT:
	      if (!expr_to_aff_combination (&current, code, TREE_TYPE (name),
					    gimple_assign_rhs1 (def)))
		/* This makes us always expand conversions which we did
		   in the past and makes gcc.dg/tree-ssa/ivopts-lt-2.c
		   PASS, eliminating one induction variable in IVOPTs.
		   ??? But it is really excessive and we should try
		   harder to do without it.  */
		aff_combination_elt (&current, TREE_TYPE (name),
				     fold_convert (TREE_TYPE (name),
						   gimple_assign_rhs1 (def)));
	      break;
	    case ADDR_EXPR:
	    case INTEGER_CST:
	    case POLY_INT_CST:
	      tree_to_aff_combination (gimple_assign_rhs1 (def),
				       TREE_TYPE (name), &current);
	      break;
	    default:
	      continue;
	    }
	  exp = XNEW (class name_expansion);
	  exp->in_progress = 1;
	  if (!*cache)
	    *cache = new hash_map<tree, name_expansion *>;
	  (*cache)->put (name, exp);
	  aff_combination_expand (&current, cache);
	  exp->expansion = current;
	  exp->in_progress = 0;
	}
      else
	{
	  /* Since we follow the definitions in the SSA form, we should not
	     enter a cycle unless we pass through a phi node.  */
	  gcc_assert (!exp->in_progress);
	  current = exp->expansion;
	}
      if (!useless_type_conversion_p (comb->type, current.type))
	aff_combination_convert (&current, comb->type);

      /* Accumulate the new terms to TO_ADD, so that we do not modify
	 COMB while traversing it; include the term -coef * E, to remove
	 it from COMB.  */
      scale = comb->elts[i].coef;
      aff_combination_zero (&curre, comb->type);
      aff_combination_add_elt (&curre, e, -scale);
      aff_combination_scale (&current, scale);
      aff_combination_add (&to_add, &current);
      aff_combination_add (&to_add, &curre);
    }
  aff_combination_add (comb, &to_add);
}
/* Similar to tree_to_aff_combination, but follows SSA name definitions
   and expands them recursively.  CACHE is used to cache the expansions
   of the ssa names, to avoid exponential time complexity for cases
   like

   a1 = a0 + a0;
   a2 = a1 + a1;
   a3 = a2 + a2;
   ...  */

void
tree_to_aff_combination_expand (tree expr, tree type, aff_tree *comb,
				hash_map<tree, name_expansion *> **cache)
{
  tree_to_aff_combination (expr, type, comb);
  aff_combination_expand (comb, cache);
}
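/* A typical caller-side sketch: the cache is created on demand inside
   aff_combination_expand and must be released with
   free_affine_expand_cache once all expansions are done:

     hash_map<tree, name_expansion *> *cache = NULL;
     aff_tree comb;
     tree_to_aff_combination_expand (expr, type, &comb, &cache);
     ...
     free_affine_expand_cache (&cache);  */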
/* Frees memory occupied by struct name_expansion in *VALUE.  Callback for
   hash_map::traverse.  */

bool
free_name_expansion (tree const &, name_expansion **value, void *)
{
  free (*value);
  return true;
}
/* Frees memory allocated for the CACHE used by
   tree_to_aff_combination_expand.  */

void
free_affine_expand_cache (hash_map<tree, name_expansion *> **cache)
{
  if (!*cache)
    return;

  (*cache)->traverse<void *, free_name_expansion> (NULL);
  delete (*cache);
  *cache = NULL;
}
/* If VAL != CST * DIV for any constant CST, returns false.
   Otherwise, if *MULT_SET is true, additionally compares CST and MULT,
   and if they are different, returns false.  Finally, if neither of these
   two cases occur, true is returned, and CST is stored to MULT and MULT_SET
   is set to true.  */

static bool
wide_int_constant_multiple_p (const poly_widest_int &val,
			      const poly_widest_int &div,
			      bool *mult_set, poly_widest_int *mult)
{
  poly_widest_int rem, cst;

  if (known_eq (val, 0))
    {
      if (*mult_set && maybe_ne (*mult, 0))
	return false;
      *mult_set = true;
      *mult = 0;
      return true;
    }

  if (maybe_eq (div, 0))
    return false;

  if (!multiple_p (val, div, &cst))
    return false;

  if (*mult_set && maybe_ne (*mult, cst))
    return false;

  *mult_set = true;
  *mult = cst;
  return true;
}
/* Returns true if VAL = X * DIV for some constant X.  If this is the case,
   X is stored to MULT.  */

bool
aff_combination_constant_multiple_p (aff_tree *val, aff_tree *div,
				     poly_widest_int *mult)
{
  bool mult_set = false;
  unsigned i;

  if (val->n == 0 && known_eq (val->offset, 0))
    {
      *mult = 0;
      return true;
    }
  if (val->n != div->n)
    return false;

  if (val->rest || div->rest)
    return false;

  if (!wide_int_constant_multiple_p (val->offset, div->offset,
				     &mult_set, mult))
    return false;

  for (i = 0; i < div->n; i++)
    {
      class aff_comb_elt *elt
	= aff_combination_find_elt (val, div->elts[i].val, NULL);
      if (!elt)
	return false;
      if (!wide_int_constant_multiple_p (elt->coef, div->elts[i].coef,
					 &mult_set, mult))
	return false;
    }

  gcc_assert (mult_set);
  return true;
}
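/* For instance, VAL = 8 + 4 * x is a constant multiple of DIV = 2 + x
   with *MULT set to 4; the same multiplier must relate the offsets and
   every pair of matching elements.  */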
/* Prints the affine VAL to the FILE.  */

static void
print_aff (FILE *file, aff_tree *val)
{
  unsigned i;
  signop sgn = TYPE_SIGN (val->type);
  if (POINTER_TYPE_P (val->type))
    sgn = SIGNED;
  fprintf (file, "{\n  type = ");
  print_generic_expr (file, val->type, TDF_VOPS|TDF_MEMSYMS);
  fprintf (file, "\n  offset = ");
  print_dec (val->offset, file, sgn);
  if (val->n > 0)
    {
      fprintf (file, "\n  elements = {\n");
      for (i = 0; i < val->n; i++)
	{
	  fprintf (file, "    [%d] = ", i);
	  print_generic_expr (file, val->elts[i].val, TDF_VOPS|TDF_MEMSYMS);

	  fprintf (file, " * ");
	  print_dec (val->elts[i].coef, file, sgn);
	  if (i != val->n - 1)
	    fprintf (file, ", \n");
	}
      fprintf (file, "\n  }");
    }
  if (val->rest)
    {
      fprintf (file, "\n  rest = ");
      print_generic_expr (file, val->rest, TDF_VOPS|TDF_MEMSYMS);
    }
  fprintf (file, "\n}");
}
/* Prints the affine VAL to the standard error, used for debugging.  */

DEBUG_FUNCTION void
debug_aff (aff_tree *val)
{
  print_aff (stderr, val);
  fprintf (stderr, "\n");
}
/* Computes address of the reference REF in ADDR.  The size of the accessed
   location is stored to SIZE.  Returns the ultimate containing object to
   which REF refers.  */

tree
get_inner_reference_aff (tree ref, aff_tree *addr, poly_widest_int *size)
{
  poly_int64 bitsize, bitpos;
  tree toff;
  machine_mode mode;
  int uns, rev, vol;
  aff_tree tmp;
  tree base = get_inner_reference (ref, &bitsize, &bitpos, &toff, &mode,
				   &uns, &rev, &vol);
  tree base_addr = build_fold_addr_expr (base);

  /* ADDR = &BASE + TOFF + BITPOS / BITS_PER_UNIT.  */

  tree_to_aff_combination (base_addr, sizetype, addr);

  if (toff)
    {
      tree_to_aff_combination (toff, sizetype, &tmp);
      aff_combination_add (addr, &tmp);
    }

  aff_combination_const (&tmp, sizetype, bits_to_bytes_round_down (bitpos));
  aff_combination_add (addr, &tmp);

  *size = bits_to_bytes_round_up (bitsize);

  return base;
}
/* Returns true if a region of size SIZE1 at position 0 and a region of
   size SIZE2 at position DIFF cannot overlap.  */

bool
aff_comb_cannot_overlap_p (aff_tree *diff, const poly_widest_int &size1,
			   const poly_widest_int &size2)
{
  /* Unless the difference is a constant, we fail.  */
  if (diff->n != 0)
    return false;

  if (!ordered_p (diff->offset, 0))
    return false;

  if (maybe_lt (diff->offset, 0))
    /* The second object is before the first one; we succeed if the last
       element of the second object is before the start of the first one.  */
    return known_le (diff->offset + size2, 0);
  else
    /* We succeed if the second object starts after the first one ends.  */
    return known_le (size1, diff->offset);
}
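/* For instance, with DIFF = 16, SIZE1 = 16 and SIZE2 = 8 the regions
   [0, 16) and [16, 24) cannot overlap; a symbolic difference makes the
   predicate conservatively return false.  */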