/* Operations with affine combinations of trees.
   Copyright (C) 2005-2019 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "ssa.h"
#include "tree-pretty-print.h"
#include "fold-const.h"
#include "tree-affine.h"
#include "gimplify.h"
#include "dumpfile.h"
#include "cfgexpand.h"
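/* Illustration, added here for exposition (the field names follow the
   aff_tree definition in tree-affine.h): an aff_tree represents a value
   of the form

     offset + coef_0 * val_0 + ... + coef_{n-1} * val_{n-1} + rest,

   so e.g. the expression 4*x + y + 7 would be encoded as offset = 7,
   n = 2, elts[0] = { x, 4 }, elts[1] = { y, 1 }, rest = NULL_TREE.  */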
/* Extends CST as appropriate for the affine combination COMB.  */

static widest_int
wide_int_ext_for_comb (const widest_int &cst, tree type)
{
  return wi::sext (cst, TYPE_PRECISION (type));
}

/* Likewise for polynomial offsets.  */

static poly_widest_int
wide_int_ext_for_comb (const poly_widest_int &cst, tree type)
{
  return wi::sext (cst, TYPE_PRECISION (type));
}
/* Initializes affine combination COMB so that its value is zero in TYPE.  */

static void
aff_combination_zero (aff_tree *comb, tree type)
{
  int i;
  comb->type = type;
  comb->offset = 0;
  comb->n = 0;
  for (i = 0; i < MAX_AFF_ELTS; i++)
    comb->elts[i].coef = 0;
  comb->rest = NULL_TREE;
}
/* Sets COMB to CST.  */

void
aff_combination_const (aff_tree *comb, tree type, const poly_widest_int &cst)
{
  aff_combination_zero (comb, type);
  comb->offset = wide_int_ext_for_comb (cst, comb->type);
}
/* Sets COMB to single element ELT.  */

void
aff_combination_elt (aff_tree *comb, tree type, tree elt)
{
  aff_combination_zero (comb, type);

  comb->n = 1;
  comb->elts[0].val = elt;
  comb->elts[0].coef = 1;
}
/* Scales COMB by SCALE.  */

void
aff_combination_scale (aff_tree *comb, const widest_int &scale_in)
{
  unsigned i, j;

  widest_int scale = wide_int_ext_for_comb (scale_in, comb->type);
  if (scale == 1)
    return;

  if (scale == 0)
    {
      aff_combination_zero (comb, comb->type);
      return;
    }

  comb->offset = wide_int_ext_for_comb (scale * comb->offset, comb->type);
  for (i = 0, j = 0; i < comb->n; i++)
    {
      widest_int new_coef
	= wide_int_ext_for_comb (scale * comb->elts[i].coef, comb->type);
      /* A coefficient may become zero due to overflow.  Remove the zero
	 elements.  */
      if (new_coef == 0)
	continue;
      comb->elts[j].coef = new_coef;
      comb->elts[j].val = comb->elts[i].val;
      j++;
    }
  comb->n = j;

  if (comb->rest)
    {
      tree type = comb->type;
      if (POINTER_TYPE_P (type))
	type = sizetype;
      if (comb->n < MAX_AFF_ELTS)
	{
	  comb->elts[comb->n].coef = scale;
	  comb->elts[comb->n].val = comb->rest;
	  comb->rest = NULL_TREE;
	  comb->n++;
	}
      else
	comb->rest = fold_build2 (MULT_EXPR, type, comb->rest,
				  wide_int_to_tree (type, scale));
    }
}
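/* For example, scaling the combination 2*x + 3 by 4 yields 8*x + 12;
   a SCALE of 0 collapses the whole combination to zero, and any
   coefficient that wraps to zero in the precision of the type is
   dropped from the element array above.  */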
/* Adds ELT * SCALE to COMB.  */

void
aff_combination_add_elt (aff_tree *comb, tree elt, const widest_int &scale_in)
{
  unsigned i;
  tree type;

  widest_int scale = wide_int_ext_for_comb (scale_in, comb->type);
  if (scale == 0)
    return;

  for (i = 0; i < comb->n; i++)
    if (operand_equal_p (comb->elts[i].val, elt, 0))
      {
	widest_int new_coef
	  = wide_int_ext_for_comb (comb->elts[i].coef + scale, comb->type);
	if (new_coef != 0)
	  {
	    comb->elts[i].coef = new_coef;
	    return;
	  }

	comb->n--;
	comb->elts[i] = comb->elts[comb->n];

	if (comb->rest)
	  {
	    gcc_assert (comb->n == MAX_AFF_ELTS - 1);
	    comb->elts[comb->n].coef = 1;
	    comb->elts[comb->n].val = comb->rest;
	    comb->rest = NULL_TREE;
	    comb->n++;
	  }
	return;
      }
  if (comb->n < MAX_AFF_ELTS)
    {
      comb->elts[comb->n].coef = scale;
      comb->elts[comb->n].val = elt;
      comb->n++;
      return;
    }

  type = comb->type;
  if (POINTER_TYPE_P (type))
    type = sizetype;

  if (scale == 1)
    elt = fold_convert (type, elt);
  else
    elt = fold_build2 (MULT_EXPR, type,
		       fold_convert (type, elt),
		       wide_int_to_tree (type, scale));

  if (comb->rest)
    comb->rest = fold_build2 (PLUS_EXPR, type, comb->rest,
			      elt);
  else
    comb->rest = elt;
}
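/* For example, adding ELT = x with SCALE = 2 to a combination that
   already contains 3*x merges the terms into 5*x; if the coefficients
   cancel, the element is removed instead.  Once all MAX_AFF_ELTS slots
   are used up, further terms accumulate symbolically in COMB->rest.  */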
/* Adds CST to C.  */

static void
aff_combination_add_cst (aff_tree *c, const poly_widest_int &cst)
{
  c->offset = wide_int_ext_for_comb (c->offset + cst, c->type);
}
/* Adds COMB2 to COMB1.  */

void
aff_combination_add (aff_tree *comb1, aff_tree *comb2)
{
  unsigned i;

  aff_combination_add_cst (comb1, comb2->offset);
  for (i = 0; i < comb2->n; i++)
    aff_combination_add_elt (comb1, comb2->elts[i].val, comb2->elts[i].coef);
  if (comb2->rest)
    aff_combination_add_elt (comb1, comb2->rest, 1);
}
/* Converts affine combination COMB to TYPE.  */

void
aff_combination_convert (aff_tree *comb, tree type)
{
  unsigned i, j;
  tree comb_type = comb->type;

  if (TYPE_PRECISION (type) > TYPE_PRECISION (comb_type))
    {
      tree val = fold_convert (type, aff_combination_to_tree (comb));
      tree_to_aff_combination (val, type, comb);
      return;
    }

  comb->type = type;
  if (comb->rest && !POINTER_TYPE_P (type))
    comb->rest = fold_convert (type, comb->rest);

  if (TYPE_PRECISION (type) == TYPE_PRECISION (comb_type))
    return;

  comb->offset = wide_int_ext_for_comb (comb->offset, comb->type);
  for (i = j = 0; i < comb->n; i++)
    {
      if (comb->elts[i].coef == 0)
	continue;
      comb->elts[j].coef = comb->elts[i].coef;
      comb->elts[j].val = fold_convert (type, comb->elts[i].val);
      j++;
    }

  comb->n = j;
  if (comb->n < MAX_AFF_ELTS && comb->rest)
    {
      comb->elts[comb->n].coef = 1;
      comb->elts[comb->n].val = comb->rest;
      comb->rest = NULL_TREE;
      comb->n++;
    }
}
/* Tries to handle OP0 CODE OP1 as affine combination of parts.  Returns
   true when that was successful and returns the combination in COMB.  */

static bool
expr_to_aff_combination (aff_tree *comb, tree_code code, tree type,
			 tree op0, tree op1 = NULL_TREE)
{
  aff_tree tmp;
  poly_int64 bitpos, bitsize, bytepos;

  switch (code)
    {
    case POINTER_PLUS_EXPR:
      tree_to_aff_combination (op0, type, comb);
      tree_to_aff_combination (op1, sizetype, &tmp);
      aff_combination_add (comb, &tmp);
      return true;

    case PLUS_EXPR:
    case MINUS_EXPR:
      tree_to_aff_combination (op0, type, comb);
      tree_to_aff_combination (op1, type, &tmp);
      if (code == MINUS_EXPR)
	aff_combination_scale (&tmp, -1);
      aff_combination_add (comb, &tmp);
      return true;

    case MULT_EXPR:
      if (TREE_CODE (op1) != INTEGER_CST)
	break;
      tree_to_aff_combination (op0, type, comb);
      aff_combination_scale (comb, wi::to_widest (op1));
      return true;

    case NEGATE_EXPR:
      tree_to_aff_combination (op0, type, comb);
      aff_combination_scale (comb, -1);
      return true;

    case BIT_NOT_EXPR:
      /* ~x = -x - 1 */
      tree_to_aff_combination (op0, type, comb);
      aff_combination_scale (comb, -1);
      aff_combination_add_cst (comb, -1);
      return true;

    CASE_CONVERT:
      {
	tree otype = type;
	tree inner = op0;
	tree itype = TREE_TYPE (inner);
	enum tree_code icode = TREE_CODE (inner);

	/* STRIP_NOPS  */
	if (tree_nop_conversion_p (otype, itype))
	  {
	    tree_to_aff_combination (op0, type, comb);
	    return true;
	  }

	/* In principle this is a valid folding, but it isn't necessarily
	   an optimization, so do it here and not in fold_unary.  */
	if ((icode == PLUS_EXPR || icode == MINUS_EXPR || icode == MULT_EXPR)
	    && TREE_CODE (itype) == INTEGER_TYPE
	    && TREE_CODE (otype) == INTEGER_TYPE
	    && TYPE_PRECISION (otype) > TYPE_PRECISION (itype))
	  {
	    tree op0 = TREE_OPERAND (inner, 0), op1 = TREE_OPERAND (inner, 1);

	    /* If inner type has undefined overflow behavior, fold conversion
	       for below two cases:
		 (T1)(X *+- CST) -> (T1)X *+- (T1)CST
		 (T1)(X + X)     -> (T1)X + (T1)X.  */
	    if (TYPE_OVERFLOW_UNDEFINED (itype)
		&& (TREE_CODE (op1) == INTEGER_CST
		    || (icode == PLUS_EXPR && operand_equal_p (op0, op1, 0))))
	      {
		op0 = fold_convert (otype, op0);
		op1 = fold_convert (otype, op1);
		return expr_to_aff_combination (comb, icode, otype, op0, op1);
	      }
	    wide_int minv, maxv;
	    /* If inner type has wrapping overflow behavior, fold conversion
	       for below case:
		 (T1)(X - CST) -> (T1)X - (T1)CST
	       if X - CST doesn't overflow by range information.  Also handle
	       (T1)(X + CST) as (T1)(X - (-CST)).  */
	    if (TYPE_UNSIGNED (itype)
		&& TYPE_OVERFLOW_WRAPS (itype)
		&& TREE_CODE (op0) == SSA_NAME
		&& TREE_CODE (op1) == INTEGER_CST
		&& icode != MULT_EXPR
		&& get_range_info (op0, &minv, &maxv) == VR_RANGE)
	      {
		if (icode == PLUS_EXPR)
		  op1 = wide_int_to_tree (itype, -wi::to_wide (op1));
		if (wi::geu_p (minv, wi::to_wide (op1)))
		  {
		    op0 = fold_convert (otype, op0);
		    op1 = fold_convert (otype, op1);
		    return expr_to_aff_combination (comb, MINUS_EXPR, otype,
						    op0, op1);
		  }
	      }
	  }
      }
      break;

    default:;
    }

  return false;
}
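/* For example, for a 32-bit int i whose overflow is undefined, the
   widening cast (long)(i * 4) is handled above as (long)i * 4, which
   lets the multiplication become an affine coefficient instead of an
   opaque tree.  */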
/* Splits EXPR into an affine combination of parts.  */

void
tree_to_aff_combination (tree expr, tree type, aff_tree *comb)
{
  aff_tree tmp;
  enum tree_code code;
  tree core, toffset;
  poly_int64 bitpos, bitsize, bytepos;
  machine_mode mode;
  int unsignedp, reversep, volatilep;

  STRIP_NOPS (expr);

  code = TREE_CODE (expr);
  switch (code)
    {
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
      if (expr_to_aff_combination (comb, code, type, TREE_OPERAND (expr, 0),
				   TREE_OPERAND (expr, 1)))
	return;
      break;

    case NEGATE_EXPR:
    case BIT_NOT_EXPR:
      if (expr_to_aff_combination (comb, code, type, TREE_OPERAND (expr, 0)))
	return;
      break;

    CASE_CONVERT:
      /* ??? TREE_TYPE (expr) should be equal to type here, but IVOPTS
	 calls this without showing an outer widening cast.  */
      if (expr_to_aff_combination (comb, code,
				   TREE_TYPE (expr), TREE_OPERAND (expr, 0)))
	{
	  aff_combination_convert (comb, type);
	  return;
	}
      break;

    case ADDR_EXPR:
      /* Handle &MEM[ptr + CST] which is equivalent to POINTER_PLUS_EXPR.  */
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == MEM_REF)
	{
	  expr = TREE_OPERAND (expr, 0);
	  tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
	  tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
	  aff_combination_add (comb, &tmp);
	  return;
	}
      core = get_inner_reference (TREE_OPERAND (expr, 0), &bitsize, &bitpos,
				  &toffset, &mode, &unsignedp, &reversep,
				  &volatilep);
      if (!multiple_p (bitpos, BITS_PER_UNIT, &bytepos))
	break;
      aff_combination_const (comb, type, bytepos);
      if (TREE_CODE (core) == MEM_REF)
	{
	  tree mem_offset = TREE_OPERAND (core, 1);
	  aff_combination_add_cst (comb, wi::to_poly_widest (mem_offset));
	  core = TREE_OPERAND (core, 0);
	}
      else
	core = build_fold_addr_expr (core);

      if (TREE_CODE (core) == ADDR_EXPR)
	aff_combination_add_elt (comb, core, 1);
      else
	{
	  tree_to_aff_combination (core, type, &tmp);
	  aff_combination_add (comb, &tmp);
	}
      if (toffset)
	{
	  tree_to_aff_combination (toffset, type, &tmp);
	  aff_combination_add (comb, &tmp);
	}
      return;

    default:
      {
	if (poly_int_tree_p (expr))
	  {
	    aff_combination_const (comb, type, wi::to_poly_widest (expr));
	    return;
	  }
	break;
      }
    }

  aff_combination_elt (comb, type, expr);
}
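/* For example, &a[i] with 4-byte array elements decomposes into the
   combination &a + 4*i: get_inner_reference splits off the constant
   byte offset (BYTEPOS), and the variable part of the offset (TOFFSET)
   is folded in recursively above.  */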
/* Creates EXPR + ELT * SCALE in TYPE.  EXPR is taken from affine
   combination COMB.  */

static tree
add_elt_to_tree (tree expr, tree type, tree elt, const widest_int &scale_in)
{
  enum tree_code code;

  widest_int scale = wide_int_ext_for_comb (scale_in, type);

  elt = fold_convert (type, elt);
  if (scale == 1)
    {
      if (!expr)
	return elt;

      return fold_build2 (PLUS_EXPR, type, expr, elt);
    }

  if (scale == -1)
    {
      if (!expr)
	return fold_build1 (NEGATE_EXPR, type, elt);

      return fold_build2 (MINUS_EXPR, type, expr, elt);
    }

  if (!expr)
    return fold_build2 (MULT_EXPR, type, elt, wide_int_to_tree (type, scale));

  if (wi::neg_p (scale))
    {
      code = MINUS_EXPR;
      scale = -scale;
    }
  else
    code = PLUS_EXPR;

  elt = fold_build2 (MULT_EXPR, type, elt, wide_int_to_tree (type, scale));
  return fold_build2 (code, type, expr, elt);
}
/* Makes tree from the affine combination COMB.  */

tree
aff_combination_to_tree (aff_tree *comb)
{
  tree type = comb->type, base = NULL_TREE, expr = NULL_TREE;
  unsigned i;
  poly_widest_int off;
  int sgn;

  gcc_assert (comb->n == MAX_AFF_ELTS || comb->rest == NULL_TREE);

  i = 0;
  if (POINTER_TYPE_P (type))
    {
      type = sizetype;
      if (comb->n > 0 && comb->elts[0].coef == 1
	  && POINTER_TYPE_P (TREE_TYPE (comb->elts[0].val)))
	{
	  base = comb->elts[0].val;
	  ++i;
	}
    }

  for (; i < comb->n; i++)
    expr = add_elt_to_tree (expr, type, comb->elts[i].val, comb->elts[i].coef);

  if (comb->rest)
    expr = add_elt_to_tree (expr, type, comb->rest, 1);

  /* Ensure that we get x - 1, not x + (-1) or x + 0xff..f if x is
     unsigned.  */
  if (known_lt (comb->offset, 0))
    {
      off = -comb->offset;
      sgn = -1;
    }
  else
    {
      off = comb->offset;
      sgn = 1;
    }
  expr = add_elt_to_tree (expr, type, wide_int_to_tree (type, off), sgn);

  if (base)
    return fold_build_pointer_plus (base, expr);
  else
    return fold_convert (comb->type, expr);
}
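/* For a pointer-typed combination such as p + 4*i + 8, the code above
   performs the arithmetic in sizetype and emits
   fold_build_pointer_plus (p, 4*i + 8) rather than doing the addition
   in the pointer type itself, keeping the generated tree well-typed.  */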
/* Copies the tree elements of COMB to ensure that they are not shared.  */

void
unshare_aff_combination (aff_tree *comb)
{
  unsigned i;

  for (i = 0; i < comb->n; i++)
    comb->elts[i].val = unshare_expr (comb->elts[i].val);
  if (comb->rest)
    comb->rest = unshare_expr (comb->rest);
}
/* Removes the M-th element from COMB.  */

void
aff_combination_remove_elt (aff_tree *comb, unsigned m)
{
  comb->n--;
  if (m <= comb->n)
    comb->elts[m] = comb->elts[comb->n];
  if (comb->rest)
    {
      comb->elts[comb->n].coef = 1;
      comb->elts[comb->n].val = comb->rest;
      comb->rest = NULL_TREE;
      comb->n++;
    }
}
/* Adds C * COEF * VAL to R.  VAL may be NULL, in that case only
   C * COEF is added to R.  */

static void
aff_combination_add_product (aff_tree *c, const widest_int &coef, tree val,
			     aff_tree *r)
{
  unsigned i;
  tree aval, type;

  for (i = 0; i < c->n; i++)
    {
      aval = c->elts[i].val;
      if (val)
	{
	  type = TREE_TYPE (aval);
	  aval = fold_build2 (MULT_EXPR, type, aval,
			      fold_convert (type, val));
	}

      aff_combination_add_elt (r, aval, coef * c->elts[i].coef);
    }

  if (c->rest)
    {
      aval = c->rest;
      if (val)
	{
	  type = TREE_TYPE (aval);
	  aval = fold_build2 (MULT_EXPR, type, aval,
			      fold_convert (type, val));
	}

      aff_combination_add_elt (r, aval, coef);
    }

  if (val)
    {
      if (c->offset.is_constant ())
	/* Access coeffs[0] directly, for efficiency.  */
	aff_combination_add_elt (r, val, coef * c->offset.coeffs[0]);
      else
	{
	  /* c->offset is polynomial, so multiply VAL rather than COEF
	     by it.  */
	  tree offset = wide_int_to_tree (TREE_TYPE (val), c->offset);
	  val = fold_build2 (MULT_EXPR, TREE_TYPE (val), val, offset);
	  aff_combination_add_elt (r, val, coef);
	}
    }
  else
    aff_combination_add_cst (r, coef * c->offset);
}
/* Multiplies C1 by C2, storing the result to R.  */

void
aff_combination_mult (aff_tree *c1, aff_tree *c2, aff_tree *r)
{
  unsigned i;
  gcc_assert (TYPE_PRECISION (c1->type) == TYPE_PRECISION (c2->type));

  aff_combination_zero (r, c1->type);

  for (i = 0; i < c2->n; i++)
    aff_combination_add_product (c1, c2->elts[i].coef, c2->elts[i].val, r);
  if (c2->rest)
    aff_combination_add_product (c1, 1, c2->rest, r);
  if (c2->offset.is_constant ())
    /* Access coeffs[0] directly, for efficiency.  */
    aff_combination_add_product (c1, c2->offset.coeffs[0], NULL, r);
  else
    {
      /* c2->offset is polynomial, so do the multiplication in tree form.  */
      tree offset = wide_int_to_tree (c2->type, c2->offset);
      aff_combination_add_product (c1, 1, offset, r);
    }
}
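/* For example, multiplying x + 1 by y + 2 distributes into
   x*y + 2*x + y + 2: products of two non-constant elements are built
   as explicit MULT_EXPR trees by aff_combination_add_product, while
   constant factors stay in the coefficients.  */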
/* Returns the element of COMB whose value is VAL, or NULL if no such
   element exists.  If IDX is not NULL, it is set to the index of VAL in
   COMB.  */

static class aff_comb_elt *
aff_combination_find_elt (aff_tree *comb, tree val, unsigned *idx)
{
  unsigned i;

  for (i = 0; i < comb->n; i++)
    if (operand_equal_p (comb->elts[i].val, val, 0))
      {
	if (idx)
	  *idx = i;

	return &comb->elts[i];
      }

  return NULL;
}
/* Element of the cache that maps ssa name NAME to its expanded form
   as an affine expression EXPANSION.  */

class name_expansion
{
public:
  aff_tree expansion;

  /* True if the expansion for the name is just being generated.  */
  unsigned in_progress : 1;
};
/* Expands SSA names in COMB recursively.  CACHE is used to cache the
   results.  */

void
aff_combination_expand (aff_tree *comb ATTRIBUTE_UNUSED,
			hash_map<tree, name_expansion *> **cache)
{
  unsigned i;
  aff_tree to_add, current, curre;
  tree e;
  gimple *def;
  widest_int scale;
  class name_expansion *exp;

  aff_combination_zero (&to_add, comb->type);
  for (i = 0; i < comb->n; i++)
    {
      tree type, name;
      enum tree_code code;

      e = comb->elts[i].val;
      type = TREE_TYPE (e);
      name = e;
      /* Look through some conversions.  */
      if (CONVERT_EXPR_P (e)
	  && (TYPE_PRECISION (type)
	      >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (e, 0)))))
	name = TREE_OPERAND (e, 0);
      if (TREE_CODE (name) != SSA_NAME)
	continue;
      def = SSA_NAME_DEF_STMT (name);
      if (!is_gimple_assign (def) || gimple_assign_lhs (def) != name)
	continue;

      code = gimple_assign_rhs_code (def);
      if (code != SSA_NAME
	  && !IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
	  && (get_gimple_rhs_class (code) != GIMPLE_SINGLE_RHS
	      || !is_gimple_min_invariant (gimple_assign_rhs1 (def))))
	continue;

      /* We do not know whether the reference retains its value at the
	 place where the expansion is used.  */
      if (TREE_CODE_CLASS (code) == tcc_reference)
	continue;

      name_expansion **slot = NULL;
      if (*cache)
	slot = (*cache)->get (name);
      exp = slot ? *slot : NULL;
      if (!exp)
	{
	  /* Only bother to handle cases tree_to_aff_combination will.  */
	  switch (code)
	    {
	    case POINTER_PLUS_EXPR:
	    case PLUS_EXPR:
	    case MINUS_EXPR:
	    case MULT_EXPR:
	      if (!expr_to_aff_combination (&current, code, TREE_TYPE (name),
					    gimple_assign_rhs1 (def),
					    gimple_assign_rhs2 (def)))
		continue;
	      break;
	    case NEGATE_EXPR:
	    case BIT_NOT_EXPR:
	      if (!expr_to_aff_combination (&current, code, TREE_TYPE (name),
					    gimple_assign_rhs1 (def)))
		continue;
	      break;
	    CASE_CONVERT:
	      if (!expr_to_aff_combination (&current, code, TREE_TYPE (name),
					    gimple_assign_rhs1 (def)))
		/* This makes us always expand conversions which we did
		   in the past and makes gcc.dg/tree-ssa/ivopts-lt-2.c
		   PASS, eliminating one induction variable in IVOPTs.
		   ??? But it is really excessive and we should try
		   harder to do without it.  */
		aff_combination_elt (&current, TREE_TYPE (name),
				     fold_convert (TREE_TYPE (name),
						   gimple_assign_rhs1 (def)));
	      break;
	    case ADDR_EXPR:
	    case INTEGER_CST:
	    case POLY_INT_CST:
	      tree_to_aff_combination (gimple_assign_rhs1 (def),
				       TREE_TYPE (name), &current);
	      break;
	    default:
	      continue;
	    }
	  exp = XNEW (class name_expansion);
	  exp->in_progress = 1;
	  if (!*cache)
	    *cache = new hash_map<tree, name_expansion *>;
	  (*cache)->put (name, exp);
	  aff_combination_expand (&current, cache);
	  exp->expansion = current;
	  exp->in_progress = 0;
	}
      else
	{
	  /* Since we follow the definitions in the SSA form, we should not
	     enter a cycle unless we pass through a phi node.  */
	  gcc_assert (!exp->in_progress);
	  current = exp->expansion;
	}
      if (!useless_type_conversion_p (comb->type, current.type))
	aff_combination_convert (&current, comb->type);

      /* Accumulate the new terms to TO_ADD, so that we do not modify
	 COMB while traversing it; include the term -coef * E, to remove
	 it from COMB.  */
      scale = comb->elts[i].coef;
      aff_combination_zero (&curre, comb->type);
      aff_combination_add_elt (&curre, e, -scale);
      aff_combination_scale (&current, scale);
      aff_combination_add (&to_add, &current);
      aff_combination_add (&to_add, &curre);
    }
  aff_combination_add (comb, &to_add);
}
/* Similar to tree_to_aff_combination, but follows SSA name definitions
   and expands them recursively.  CACHE is used to cache the expansions
   of the ssa names, to avoid exponential time complexity for cases
   like

   a1 = a0 + a0;
   a2 = a1 + a1;
   a3 = a2 + a2;
   ...  */

void
tree_to_aff_combination_expand (tree expr, tree type, aff_tree *comb,
				hash_map<tree, name_expansion *> **cache)
{
  tree_to_aff_combination (expr, type, comb);
  aff_combination_expand (comb, cache);
}
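/* A typical caller-side sketch (the variable names here are purely
   illustrative):

     hash_map<tree, name_expansion *> *cache = NULL;
     aff_tree comb;
     tree_to_aff_combination_expand (expr, type, &comb, &cache);
     ... use or compare the expanded combination ...
     free_affine_expand_cache (&cache);

   The cache can be shared across many expansions and is allocated
   lazily on first use.  */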
/* Frees memory occupied by struct name_expansion in *VALUE.  Callback for
   hash_map::traverse.  */

bool
free_name_expansion (tree const &, name_expansion **value, void *)
{
  free (*value);
  return true;
}
/* Frees memory allocated for the CACHE used by
   tree_to_aff_combination_expand.  */

void
free_affine_expand_cache (hash_map<tree, name_expansion *> **cache)
{
  if (!*cache)
    return;

  (*cache)->traverse<void *, free_name_expansion> (NULL);
  delete (*cache);
  *cache = NULL;
}
/* If VAL != CST * DIV for any constant CST, returns false.
   Otherwise, if *MULT_SET is true, additionally compares CST and MULT,
   and if they are different, returns false.  Finally, if neither of these
   two cases occur, true is returned, and CST is stored to MULT and MULT_SET
   is set to true.  */

static bool
wide_int_constant_multiple_p (const poly_widest_int &val,
			      const poly_widest_int &div,
			      bool *mult_set, poly_widest_int *mult)
{
  poly_widest_int rem, cst;

  if (known_eq (val, 0))
    {
      if (*mult_set && maybe_ne (*mult, 0))
	return false;
      *mult_set = true;
      *mult = 0;
      return true;
    }

  if (maybe_eq (div, 0))
    return false;

  if (!multiple_p (val, div, &cst))
    return false;

  if (*mult_set && maybe_ne (*mult, cst))
    return false;

  *mult_set = true;
  *mult = cst;
  return true;
}
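/* For example, VAL = 12 and DIV = 4 succeeds with *MULT = 3; a later
   call on the same MULT with VAL = 8 and DIV = 4 then fails, because
   the new multiple (2) does not match the recorded one (3).  */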
/* Returns true if VAL = X * DIV for some constant X.  If this is the case,
   X is stored to MULT.  */

bool
aff_combination_constant_multiple_p (aff_tree *val, aff_tree *div,
				     poly_widest_int *mult)
{
  bool mult_set = false;
  unsigned i;

  if (val->n == 0 && known_eq (val->offset, 0))
    {
      *mult = 0;
      return true;
    }
  if (val->n != div->n)
    return false;

  if (val->rest || div->rest)
    return false;

  if (!wide_int_constant_multiple_p (val->offset, div->offset,
				     &mult_set, mult))
    return false;

  for (i = 0; i < div->n; i++)
    {
      class aff_comb_elt *elt
	= aff_combination_find_elt (val, div->elts[i].val, NULL);
      if (!elt)
	return false;
      if (!wide_int_constant_multiple_p (elt->coef, div->elts[i].coef,
					 &mult_set, mult))
	return false;
    }

  gcc_assert (mult_set);
  return true;
}
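/* For example, VAL = 8*x + 12 and DIV = 2*x + 3 succeed with *MULT = 4,
   since the offset and every coefficient are the same constant multiple
   of their counterparts in DIV.  */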
/* Prints the affine VAL to the FILE.  */

static void
print_aff (FILE *file, aff_tree *val)
{
  unsigned i;
  signop sgn = TYPE_SIGN (val->type);
  if (POINTER_TYPE_P (val->type))
    sgn = SIGNED;
  fprintf (file, "{\n  type = ");
  print_generic_expr (file, val->type, TDF_VOPS|TDF_MEMSYMS);
  fprintf (file, "\n  offset = ");
  print_dec (val->offset, file, sgn);
  if (val->n > 0)
    {
      fprintf (file, "\n  elements = {\n");
      for (i = 0; i < val->n; i++)
	{
	  fprintf (file, "    [%d] = ", i);
	  print_generic_expr (file, val->elts[i].val, TDF_VOPS|TDF_MEMSYMS);

	  fprintf (file, " * ");
	  print_dec (val->elts[i].coef, file, sgn);
	  if (i != val->n - 1)
	    fprintf (file, ", \n");
	}
      fprintf (file, "\n  }");
    }
  if (val->rest)
    {
      fprintf (file, "\n  rest = ");
      print_generic_expr (file, val->rest, TDF_VOPS|TDF_MEMSYMS);
    }
  fprintf (file, "\n}");
}
/* Prints the affine VAL to the standard error, used for debugging.  */

DEBUG_FUNCTION void
debug_aff (aff_tree *val)
{
  print_aff (stderr, val);
  fprintf (stderr, "\n");
}
/* Computes address of the reference REF in ADDR.  The size of the accessed
   location is stored to SIZE.  Returns the ultimate containing object to
   which REF refers.  */

tree
get_inner_reference_aff (tree ref, aff_tree *addr, poly_widest_int *size)
{
  poly_int64 bitsize, bitpos;
  tree toff;
  machine_mode mode;
  int uns, rev, vol;
  aff_tree tmp;
  tree base = get_inner_reference (ref, &bitsize, &bitpos, &toff, &mode,
				   &uns, &rev, &vol);
  tree base_addr = build_fold_addr_expr (base);

  /* ADDR = &BASE + TOFF + BITPOS / BITS_PER_UNIT.  */

  tree_to_aff_combination (base_addr, sizetype, addr);

  if (toff)
    {
      tree_to_aff_combination (toff, sizetype, &tmp);
      aff_combination_add (addr, &tmp);
    }

  aff_combination_const (&tmp, sizetype, bits_to_bytes_round_down (bitpos));
  aff_combination_add (addr, &tmp);

  *size = bits_to_bytes_round_up (bitsize);

  return base;
}
/* Returns true if a region of size SIZE1 at position 0 and a region of
   size SIZE2 at position DIFF cannot overlap.  */

bool
aff_comb_cannot_overlap_p (aff_tree *diff, const poly_widest_int &size1,
			   const poly_widest_int &size2)
{
  /* Unless the difference is a constant, we fail.  */
  if (diff->n != 0)
    return false;

  if (!ordered_p (diff->offset, 0))
    return false;

  if (maybe_lt (diff->offset, 0))
    /* The second object is before the first one, we succeed if the last
       element of the second object is before the start of the first one.  */
    return known_le (diff->offset + size2, 0);
  else
    /* We succeed if the second object starts after the first one ends.  */
    return known_le (size1, diff->offset);
}
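/* For example, with DIFF = 16, SIZE1 = 8 and SIZE2 = 8 the regions
   [0, 8) and [16, 24) cannot overlap, so the function returns true;
   with DIFF = 4 it returns false, since [0, 8) and [4, 12) collide.  */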