/* Operations with affine combinations of trees.
   Copyright (C) 2005-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "alias.h"
#include "symtab.h"
#include "options.h"
#include "tree.h"
#include "fold-const.h"
#include "tm.h"
#include "hard-reg-set.h"
#include "function.h"
#include "rtl.h"
#include "flags.h"
#include "insn-config.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "emit-rtl.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "tree-pretty-print.h"
#include "tree-affine.h"
#include "predict.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "gimple.h"
#include "gimplify.h"
#include "dumpfile.h"
#include "cfgexpand.h"
#include "wide-int-print.h"

/* Extends CST as appropriate for the affine combinations COMB.  */

widest_int
wide_int_ext_for_comb (const widest_int &cst, aff_tree *comb)
{
  return wi::sext (cst, TYPE_PRECISION (comb->type));
}
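
/* Usage sketch for wide_int_ext_for_comb (illustrative): the extension
   makes constants behave as if computed in COMB->type.  Assuming a
   32-bit COMB->type, the unsigned constant 0xffffffff comes back as -1:

     widest_int c = wide_int_ext_for_comb (wi::to_widest (cst), comb);
     // TYPE_PRECISION (comb->type) == 32, cst == 0xffffffff  =>  c == -1  */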

/* Initializes affine combination COMB so that its value is zero in TYPE.  */

static void
aff_combination_zero (aff_tree *comb, tree type)
{
  int i;

  comb->type = type;
  comb->offset = 0;
  comb->n = 0;
  for (i = 0; i < MAX_AFF_ELTS; i++)
    comb->elts[i].coef = 0;
  comb->rest = NULL_TREE;
}

/* Sets COMB to CST.  */

void
aff_combination_const (aff_tree *comb, tree type, const widest_int &cst)
{
  aff_combination_zero (comb, type);
  comb->offset = wide_int_ext_for_comb (cst, comb);
}

/* Sets COMB to single element ELT.  */

void
aff_combination_elt (aff_tree *comb, tree type, tree elt)
{
  aff_combination_zero (comb, type);

  comb->n = 1;
  comb->elts[0].val = elt;
  comb->elts[0].coef = 1;
}

/* Scales COMB by SCALE.  */

void
aff_combination_scale (aff_tree *comb, const widest_int &scale_in)
{
  unsigned i, j;

  widest_int scale = wide_int_ext_for_comb (scale_in, comb);
  if (scale == 1)
    return;

  if (scale == 0)
    {
      aff_combination_zero (comb, comb->type);
      return;
    }

  comb->offset = wide_int_ext_for_comb (scale * comb->offset, comb);
  for (i = 0, j = 0; i < comb->n; i++)
    {
      widest_int new_coef
        = wide_int_ext_for_comb (scale * comb->elts[i].coef, comb);
      /* A coefficient may become zero due to overflow.  Remove the zero
         elements.  */
      if (new_coef == 0)
        continue;
      comb->elts[j].coef = new_coef;
      comb->elts[j].val = comb->elts[i].val;
      j++;
    }
  comb->n = j;

  if (comb->rest)
    {
      tree type = comb->type;
      if (POINTER_TYPE_P (type))
        type = sizetype;
      if (comb->n < MAX_AFF_ELTS)
        {
          comb->elts[comb->n].coef = scale;
          comb->elts[comb->n].val = comb->rest;
          comb->rest = NULL_TREE;
          comb->n++;
        }
      else
        comb->rest = fold_build2 (MULT_EXPR, type, comb->rest,
                                  wide_int_to_tree (type, scale));
    }
}
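
/* Usage sketch for aff_combination_scale (illustrative): scaling the
   combination 16 + 4*i by -2 yields -32 - 8*i.  The offset and every
   coefficient are re-extended to the precision of COMB->type, so a
   coefficient that wraps to zero has its element dropped:

     aff_tree comb;
     tree_to_aff_combination (expr, type, &comb);  // comb = 16 + 4*i
     aff_combination_scale (&comb, -2);            // comb = -32 - 8*i  */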

/* Adds ELT * SCALE to COMB.  */

void
aff_combination_add_elt (aff_tree *comb, tree elt, const widest_int &scale_in)
{
  unsigned i;
  tree type;

  widest_int scale = wide_int_ext_for_comb (scale_in, comb);
  if (scale == 0)
    return;

  for (i = 0; i < comb->n; i++)
    if (operand_equal_p (comb->elts[i].val, elt, 0))
      {
        widest_int new_coef
          = wide_int_ext_for_comb (comb->elts[i].coef + scale, comb);
        if (new_coef != 0)
          {
            comb->elts[i].coef = new_coef;
            return;
          }

        comb->n--;
        comb->elts[i] = comb->elts[comb->n];

        if (comb->rest)
          {
            gcc_assert (comb->n == MAX_AFF_ELTS - 1);
            comb->elts[comb->n].coef = 1;
            comb->elts[comb->n].val = comb->rest;
            comb->rest = NULL_TREE;
            comb->n++;
          }
        return;
      }
  if (comb->n < MAX_AFF_ELTS)
    {
      comb->elts[comb->n].coef = scale;
      comb->elts[comb->n].val = elt;
      comb->n++;
      return;
    }

  type = comb->type;
  if (POINTER_TYPE_P (type))
    type = sizetype;

  if (scale == 1)
    elt = fold_convert (type, elt);
  else
    elt = fold_build2 (MULT_EXPR, type,
                       fold_convert (type, elt),
                       wide_int_to_tree (type, scale));

  if (comb->rest)
    comb->rest = fold_build2 (PLUS_EXPR, type, comb->rest,
                              elt);
  else
    comb->rest = elt;
}

/* Adds CST to C.  */

static void
aff_combination_add_cst (aff_tree *c, const widest_int &cst)
{
  c->offset = wide_int_ext_for_comb (c->offset + cst, c);
}

/* Adds COMB2 to COMB1.  */

void
aff_combination_add (aff_tree *comb1, aff_tree *comb2)
{
  unsigned i;

  aff_combination_add_cst (comb1, comb2->offset);
  for (i = 0; i < comb2->n; i++)
    aff_combination_add_elt (comb1, comb2->elts[i].val, comb2->elts[i].coef);
  if (comb2->rest)
    aff_combination_add_elt (comb1, comb2->rest, 1);
}
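
/* Usage sketch for aff_combination_add (illustrative): adding
   (4 + 2*i) and (1 - 2*i) merges the i terms through
   aff_combination_add_elt; the coefficients cancel and the element is
   removed, leaving the constant 5:

     aff_combination_add (&c1, &c2);  // c1 = 4 + 2*i, c2 = 1 - 2*i
     // afterwards c1.offset == 5 and c1.n == 0  */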

/* Converts affine combination COMB to TYPE.  */

void
aff_combination_convert (aff_tree *comb, tree type)
{
  unsigned i, j;
  tree comb_type = comb->type;

  if (TYPE_PRECISION (type) > TYPE_PRECISION (comb_type))
    {
      tree val = fold_convert (type, aff_combination_to_tree (comb));
      tree_to_aff_combination (val, type, comb);
      return;
    }

  comb->type = type;
  if (comb->rest && !POINTER_TYPE_P (type))
    comb->rest = fold_convert (type, comb->rest);

  if (TYPE_PRECISION (type) == TYPE_PRECISION (comb_type))
    return;

  comb->offset = wide_int_ext_for_comb (comb->offset, comb);
  for (i = j = 0; i < comb->n; i++)
    {
      if (comb->elts[i].coef == 0)
        continue;
      comb->elts[j].coef = comb->elts[i].coef;
      comb->elts[j].val = fold_convert (type, comb->elts[i].val);
      j++;
    }

  comb->n = j;
  if (comb->n < MAX_AFF_ELTS && comb->rest)
    {
      comb->elts[comb->n].coef = 1;
      comb->elts[comb->n].val = comb->rest;
      comb->rest = NULL_TREE;
      comb->n++;
    }
}

/* Splits EXPR into an affine combination of parts.  */

void
tree_to_aff_combination (tree expr, tree type, aff_tree *comb)
{
  aff_tree tmp;
  enum tree_code code;
  tree cst, core, toffset;
  HOST_WIDE_INT bitpos, bitsize;
  machine_mode mode;
  int unsignedp, volatilep;

  STRIP_NOPS (expr);

  code = TREE_CODE (expr);
  switch (code)
    {
    case INTEGER_CST:
      aff_combination_const (comb, type, wi::to_widest (expr));
      return;

    case POINTER_PLUS_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
      aff_combination_add (comb, &tmp);
      return;

    case PLUS_EXPR:
    case MINUS_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), type, &tmp);
      if (code == MINUS_EXPR)
        aff_combination_scale (&tmp, -1);
      aff_combination_add (comb, &tmp);
      return;

    case MULT_EXPR:
      cst = TREE_OPERAND (expr, 1);
      if (TREE_CODE (cst) != INTEGER_CST)
        break;
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, wi::to_widest (cst));
      return;

    case NEGATE_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, -1);
      return;

    case BIT_NOT_EXPR:
      /* ~x = -x - 1  */
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, -1);
      aff_combination_add_cst (comb, -1);
      return;

    case ADDR_EXPR:
      /* Handle &MEM[ptr + CST] which is equivalent to POINTER_PLUS_EXPR.  */
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == MEM_REF)
        {
          expr = TREE_OPERAND (expr, 0);
          tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
          tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
          aff_combination_add (comb, &tmp);
          return;
        }
      core = get_inner_reference (TREE_OPERAND (expr, 0), &bitsize, &bitpos,
                                  &toffset, &mode, &unsignedp, &volatilep,
                                  false);
      if (bitpos % BITS_PER_UNIT != 0)
        break;
      aff_combination_const (comb, type, bitpos / BITS_PER_UNIT);
      if (TREE_CODE (core) == MEM_REF)
        {
          aff_combination_add_cst (comb,
                                   wi::to_widest (TREE_OPERAND (core, 1)));
          core = TREE_OPERAND (core, 0);
        }
      else
        core = build_fold_addr_expr (core);

      if (TREE_CODE (core) == ADDR_EXPR)
        aff_combination_add_elt (comb, core, 1);
      else
        {
          tree_to_aff_combination (core, type, &tmp);
          aff_combination_add (comb, &tmp);
        }
      if (toffset)
        {
          tree_to_aff_combination (toffset, type, &tmp);
          aff_combination_add (comb, &tmp);
        }
      return;

    case MEM_REF:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR)
        tree_to_aff_combination (TREE_OPERAND (TREE_OPERAND (expr, 0), 0),
                                 type, comb);
      else if (integer_zerop (TREE_OPERAND (expr, 1)))
        {
          aff_combination_elt (comb, type, expr);
          return;
        }
      else
        aff_combination_elt (comb, type,
                             build2 (MEM_REF, TREE_TYPE (expr),
                                     TREE_OPERAND (expr, 0),
                                     build_int_cst
                                       (TREE_TYPE (TREE_OPERAND (expr, 1)),
                                        0)));
      tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
      aff_combination_add (comb, &tmp);
      return;

    default:
      break;
    }

  aff_combination_elt (comb, type, expr);
}
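
/* Usage sketch for tree_to_aff_combination (illustrative): decomposing
   the GENERIC expression a + 4*b - 7 in a 32-bit integer TYPE yields
   offset -7 and the elements {a, 1} and {b, 4}.  Subexpressions the
   switch above cannot split further (e.g. a call result) become a
   single element with coefficient 1:

     aff_tree comb;
     tree_to_aff_combination (expr, TREE_TYPE (expr), &comb);
     // comb.offset == -7, comb.n == 2,
     // comb.elts[] holds {a, 1} and {b, 4} (in some order)  */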

/* Creates EXPR + ELT * SCALE in TYPE.  EXPR is taken from affine
   combination COMB.  */

static tree
add_elt_to_tree (tree expr, tree type, tree elt, const widest_int &scale_in,
                 aff_tree *comb ATTRIBUTE_UNUSED)
{
  enum tree_code code;
  tree type1 = type;
  if (POINTER_TYPE_P (type))
    type1 = sizetype;

  widest_int scale = wide_int_ext_for_comb (scale_in, comb);

  if (scale == -1
      && POINTER_TYPE_P (TREE_TYPE (elt)))
    {
      elt = convert_to_ptrofftype (elt);
      elt = fold_build1 (NEGATE_EXPR, TREE_TYPE (elt), elt);
      scale = 1;
    }

  if (scale == 1)
    {
      if (!expr)
        {
          if (POINTER_TYPE_P (TREE_TYPE (elt)))
            return elt;
          else
            return fold_convert (type1, elt);
        }

      if (POINTER_TYPE_P (TREE_TYPE (expr)))
        return fold_build_pointer_plus (expr, elt);
      if (POINTER_TYPE_P (TREE_TYPE (elt)))
        return fold_build_pointer_plus (elt, expr);
      return fold_build2 (PLUS_EXPR, type1,
                          expr, fold_convert (type1, elt));
    }

  if (scale == -1)
    {
      if (!expr)
        return fold_build1 (NEGATE_EXPR, type1,
                            fold_convert (type1, elt));

      if (POINTER_TYPE_P (TREE_TYPE (expr)))
        {
          elt = convert_to_ptrofftype (elt);
          elt = fold_build1 (NEGATE_EXPR, TREE_TYPE (elt), elt);
          return fold_build_pointer_plus (expr, elt);
        }
      return fold_build2 (MINUS_EXPR, type1,
                          expr, fold_convert (type1, elt));
    }

  elt = fold_convert (type1, elt);
  if (!expr)
    return fold_build2 (MULT_EXPR, type1, elt,
                        wide_int_to_tree (type1, scale));

  if (wi::neg_p (scale))
    {
      code = MINUS_EXPR;
      scale = -scale;
    }
  else
    code = PLUS_EXPR;

  elt = fold_build2 (MULT_EXPR, type1, elt,
                     wide_int_to_tree (type1, scale));
  if (POINTER_TYPE_P (TREE_TYPE (expr)))
    {
      if (code == MINUS_EXPR)
        elt = fold_build1 (NEGATE_EXPR, type1, elt);
      return fold_build_pointer_plus (expr, elt);
    }

  return fold_build2 (code, type1, expr, elt);
}

/* Makes tree from the affine combination COMB.  */

tree
aff_combination_to_tree (aff_tree *comb)
{
  tree type = comb->type;
  tree expr = NULL_TREE;
  unsigned i;
  widest_int off, sgn;
  tree type1 = type;
  if (POINTER_TYPE_P (type))
    type1 = sizetype;

  gcc_assert (comb->n == MAX_AFF_ELTS || comb->rest == NULL_TREE);

  for (i = 0; i < comb->n; i++)
    expr = add_elt_to_tree (expr, type, comb->elts[i].val, comb->elts[i].coef,
                            comb);

  if (comb->rest)
    expr = add_elt_to_tree (expr, type, comb->rest, 1, comb);

  /* Ensure that we get x - 1, not x + (-1) or x + 0xff..f if x is
     unsigned.  */
  if (wi::neg_p (comb->offset))
    {
      off = -comb->offset;
      sgn = -1;
    }
  else
    {
      off = comb->offset;
      sgn = 1;
    }
  return add_elt_to_tree (expr, type, wide_int_to_tree (type1, off), sgn,
                          comb);
}
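
/* Usage sketch for aff_combination_to_tree (illustrative): a round trip
   through the affine representation re-associates and constant-folds
   address arithmetic:

     aff_tree comb;
     tree_to_aff_combination (expr, type, &comb);
     tree folded = aff_combination_to_tree (&comb);
     // (i + 3) + (i - 3) comes back as i * 2  */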

/* Copies the tree elements of COMB to ensure that they are not shared.  */

void
unshare_aff_combination (aff_tree *comb)
{
  unsigned i;

  for (i = 0; i < comb->n; i++)
    comb->elts[i].val = unshare_expr (comb->elts[i].val);
  if (comb->rest)
    comb->rest = unshare_expr (comb->rest);
}

/* Remove M-th element from COMB.  */

void
aff_combination_remove_elt (aff_tree *comb, unsigned m)
{
  comb->n--;
  if (m <= comb->n)
    comb->elts[m] = comb->elts[comb->n];
  if (comb->rest)
    {
      comb->elts[comb->n].coef = 1;
      comb->elts[comb->n].val = comb->rest;
      comb->rest = NULL_TREE;
      comb->n++;
    }
}

/* Adds C * COEF * VAL to R.  VAL may be NULL, in that case only
   C * COEF is added to R.  */

static void
aff_combination_add_product (aff_tree *c, const widest_int &coef, tree val,
                             aff_tree *r)
{
  unsigned i;
  tree aval, type;

  for (i = 0; i < c->n; i++)
    {
      aval = c->elts[i].val;
      if (val)
        {
          type = TREE_TYPE (aval);
          aval = fold_build2 (MULT_EXPR, type, aval,
                              fold_convert (type, val));
        }

      aff_combination_add_elt (r, aval, coef * c->elts[i].coef);
    }

  if (c->rest)
    {
      aval = c->rest;
      if (val)
        {
          type = TREE_TYPE (aval);
          aval = fold_build2 (MULT_EXPR, type, aval,
                              fold_convert (type, val));
        }

      aff_combination_add_elt (r, aval, coef);
    }

  if (val)
    aff_combination_add_elt (r, val, coef * c->offset);
  else
    aff_combination_add_cst (r, coef * c->offset);
}

/* Multiplies C1 by C2, storing the result to R.  */

void
aff_combination_mult (aff_tree *c1, aff_tree *c2, aff_tree *r)
{
  unsigned i;

  gcc_assert (TYPE_PRECISION (c1->type) == TYPE_PRECISION (c2->type));

  aff_combination_zero (r, c1->type);

  for (i = 0; i < c2->n; i++)
    aff_combination_add_product (c1, c2->elts[i].coef, c2->elts[i].val, r);
  if (c2->rest)
    aff_combination_add_product (c1, 1, c2->rest, r);
  aff_combination_add_product (c1, c2->offset, NULL, r);
}
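
/* Usage sketch for aff_combination_mult (illustrative): multiplying
   (2 + i) by (3 + j) expands the product term by term into
   6 + 3*i + 2*j + i*j; the non-affine i*j part is kept as a single
   element built by aff_combination_add_product:

     aff_combination_mult (&c1, &c2, &r);  // c1 = 2 + i, c2 = 3 + j  */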

/* Returns the element of COMB whose value is VAL, or NULL if no such
   element exists.  If IDX is not NULL, it is set to the index of VAL in
   COMB.  */

static struct aff_comb_elt *
aff_combination_find_elt (aff_tree *comb, tree val, unsigned *idx)
{
  unsigned i;

  for (i = 0; i < comb->n; i++)
    if (operand_equal_p (comb->elts[i].val, val, 0))
      {
        if (idx)
          *idx = i;

        return &comb->elts[i];
      }

  return NULL;
}

/* Element of the cache that maps ssa name NAME to its expanded form
   as an affine expression EXPANSION.  */

struct name_expansion
{
  aff_tree expansion;

  /* True if the expansion for the name is just being generated.  */
  unsigned in_progress : 1;
};

/* Expands SSA names in COMB recursively.  CACHE is used to cache the
   results.  */

void
aff_combination_expand (aff_tree *comb ATTRIBUTE_UNUSED,
                        hash_map<tree, name_expansion *> **cache)
{
  unsigned i;
  aff_tree to_add, current, curre;
  tree e, rhs;
  gimple def;
  widest_int scale;
  struct name_expansion *exp;

  aff_combination_zero (&to_add, comb->type);
  for (i = 0; i < comb->n; i++)
    {
      tree type, name;
      enum tree_code code;

      e = comb->elts[i].val;
      type = TREE_TYPE (e);
      name = e;
      /* Look through some conversions.  */
      if (CONVERT_EXPR_P (e)
          && (TYPE_PRECISION (type)
              >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (e, 0)))))
        name = TREE_OPERAND (e, 0);
      if (TREE_CODE (name) != SSA_NAME)
        continue;
      def = SSA_NAME_DEF_STMT (name);
      if (!is_gimple_assign (def) || gimple_assign_lhs (def) != name)
        continue;

      code = gimple_assign_rhs_code (def);
      if (code != SSA_NAME
          && !IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
          && (get_gimple_rhs_class (code) != GIMPLE_SINGLE_RHS
              || !is_gimple_min_invariant (gimple_assign_rhs1 (def))))
        continue;

      /* We do not know whether the reference retains its value at the
         place where the expansion is used.  */
      if (TREE_CODE_CLASS (code) == tcc_reference)
        continue;

      if (!*cache)
        *cache = new hash_map<tree, name_expansion *>;
      name_expansion **slot = &(*cache)->get_or_insert (e);
      exp = *slot;

      if (!exp)
        {
          exp = XNEW (struct name_expansion);
          exp->in_progress = 1;
          *slot = exp;
          /* In principle this is a generally valid folding, but
             it is not unconditionally an optimization, so do it
             here and not in fold_unary.  */
          /* Convert (T1)(X *+- CST) into (T1)X *+- (T1)CST if T1 is wider
             than the type of X and overflow for the type of X is
             undefined.  */
          if (e != name
              && INTEGRAL_TYPE_P (type)
              && INTEGRAL_TYPE_P (TREE_TYPE (name))
              && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (name))
              && TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (name))
              && (code == PLUS_EXPR || code == MINUS_EXPR
                  || code == MULT_EXPR)
              && TREE_CODE (gimple_assign_rhs2 (def)) == INTEGER_CST)
            rhs = fold_build2 (code, type,
                               fold_convert (type, gimple_assign_rhs1 (def)),
                               fold_convert (type, gimple_assign_rhs2 (def)));
          else
            {
              rhs = gimple_assign_rhs_to_tree (def);
              if (e != name)
                rhs = fold_convert (type, rhs);
            }
          tree_to_aff_combination_expand (rhs, comb->type, &current, cache);
          exp->expansion = current;
          exp->in_progress = 0;
        }
      else
        {
          /* Since we follow the definitions in the SSA form, we should not
             enter a cycle unless we pass through a phi node.  */
          gcc_assert (!exp->in_progress);
          current = exp->expansion;
        }

      /* Accumulate the new terms to TO_ADD, so that we do not modify
         COMB while traversing it; include the term -coef * E, to remove
         it from COMB.  */
      scale = comb->elts[i].coef;
      aff_combination_zero (&curre, comb->type);
      aff_combination_add_elt (&curre, e, -scale);
      aff_combination_scale (&current, scale);
      aff_combination_add (&to_add, &current);
      aff_combination_add (&to_add, &curre);
    }
  aff_combination_add (comb, &to_add);
}

/* Similar to tree_to_aff_combination, but follows SSA name definitions
   and expands them recursively.  CACHE is used to cache the expansions
   of the ssa names, to avoid exponential time complexity for cases
   like

   a1 = a0 + a0;
   a2 = a1 + a1;
   a3 = a2 + a2;
   ...  */

void
tree_to_aff_combination_expand (tree expr, tree type, aff_tree *comb,
                                hash_map<tree, name_expansion *> **cache)
{
  tree_to_aff_combination (expr, type, comb);
  aff_combination_expand (comb, cache);
}
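
/* Usage sketch for tree_to_aff_combination_expand (illustrative): with
   the definitions from the comment above, expanding a3 yields 8 * a0
   rather than a tree of additions.  The caller owns CACHE and must
   release it with free_affine_expand_cache:

     hash_map<tree, name_expansion *> *cache = NULL;
     aff_tree comb;
     tree_to_aff_combination_expand (a3, type, &comb, &cache);
     // comb == 8 * a0
     free_affine_expand_cache (&cache);  */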

/* Frees memory occupied by struct name_expansion in *VALUE.  Callback for
   hash_map::traverse.  */

bool
free_name_expansion (tree const &, name_expansion **value, void *)
{
  free (*value);
  return true;
}

/* Frees memory allocated for the CACHE used by
   tree_to_aff_combination_expand.  */

void
free_affine_expand_cache (hash_map<tree, name_expansion *> **cache)
{
  if (!*cache)
    return;

  (*cache)->traverse<void *, free_name_expansion> (NULL);
  delete (*cache);
  *cache = NULL;
}

/* If VAL != CST * DIV for any constant CST, returns false.
   Otherwise, if *MULT_SET is true, additionally compares CST and MULT,
   and if they are different, returns false.  Finally, if neither of these
   two cases occur, true is returned, and CST is stored to MULT and MULT_SET
   is set to true.  */

static bool
wide_int_constant_multiple_p (const widest_int &val, const widest_int &div,
                              bool *mult_set, widest_int *mult)
{
  widest_int cst;

  if (val == 0)
    {
      if (*mult_set && *mult != 0)
        return false;
      *mult_set = true;
      *mult = 0;
      return true;
    }

  if (div == 0)
    return false;

  if (!wi::multiple_of_p (val, div, SIGNED, &cst))
    return false;

  if (*mult_set && *mult != cst)
    return false;

  *mult_set = true;
  *mult = cst;
  return true;
}

/* Returns true if VAL = X * DIV for some constant X.  If this is the case,
   X is stored to MULT.  */

bool
aff_combination_constant_multiple_p (aff_tree *val, aff_tree *div,
                                     widest_int *mult)
{
  bool mult_set = false;
  unsigned i;

  if (val->n == 0 && val->offset == 0)
    {
      *mult = 0;
      return true;
    }
  if (val->n != div->n)
    return false;

  if (val->rest || div->rest)
    return false;

  if (!wide_int_constant_multiple_p (val->offset, div->offset,
                                     &mult_set, mult))
    return false;

  for (i = 0; i < div->n; i++)
    {
      struct aff_comb_elt *elt
        = aff_combination_find_elt (val, div->elts[i].val, NULL);
      if (!elt)
        return false;
      if (!wide_int_constant_multiple_p (elt->coef, div->elts[i].coef,
                                         &mult_set, mult))
        return false;
    }

  gcc_assert (mult_set);
  return true;
}
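
/* Usage sketch for aff_combination_constant_multiple_p (illustrative):
   (8 + 4*i) is a constant multiple of (2 + i), so the test succeeds
   with X == 4.  The elements must match pairwise, and a symbolic
   "rest" part on either side makes the test fail:

     widest_int mult;
     if (aff_combination_constant_multiple_p (&val, &div, &mult))
       ;  // val = 8 + 4*i, div = 2 + i  =>  mult == 4  */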

/* Prints the affine VAL to the FILE.  */

static void
print_aff (FILE *file, aff_tree *val)
{
  unsigned i;
  signop sgn = TYPE_SIGN (val->type);
  if (POINTER_TYPE_P (val->type))
    sgn = SIGNED;
  fprintf (file, "{\n  type = ");
  print_generic_expr (file, val->type, TDF_VOPS|TDF_MEMSYMS);
  fprintf (file, "\n  offset = ");
  print_dec (val->offset, file, sgn);
  if (val->n > 0)
    {
      fprintf (file, "\n  elements = {\n");
      for (i = 0; i < val->n; i++)
        {
          fprintf (file, "    [%d] = ", i);
          print_generic_expr (file, val->elts[i].val, TDF_VOPS|TDF_MEMSYMS);

          fprintf (file, " * ");
          print_dec (val->elts[i].coef, file, sgn);
          if (i != val->n - 1)
            fprintf (file, ", \n");
        }
      fprintf (file, "\n  }");
    }
  if (val->rest)
    {
      fprintf (file, "\n  rest = ");
      print_generic_expr (file, val->rest, TDF_VOPS|TDF_MEMSYMS);
    }
  fprintf (file, "\n}");
}

/* Prints the affine VAL to the standard error, used for debugging.  */

DEBUG_FUNCTION void
debug_aff (aff_tree *val)
{
  print_aff (stderr, val);
  fprintf (stderr, "\n");
}

/* Computes address of the reference REF in ADDR.  The size of the accessed
   location is stored to SIZE.  Returns the ultimate containing object to
   which REF refers.  */

tree
get_inner_reference_aff (tree ref, aff_tree *addr, widest_int *size)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree toff;
  machine_mode mode;
  int uns, vol;
  aff_tree tmp;
  tree base = get_inner_reference (ref, &bitsize, &bitpos, &toff, &mode,
                                   &uns, &vol, false);
  tree base_addr = build_fold_addr_expr (base);

  /* ADDR = &BASE + TOFF + BITPOS / BITS_PER_UNIT.  */

  tree_to_aff_combination (base_addr, sizetype, addr);

  if (toff)
    {
      tree_to_aff_combination (toff, sizetype, &tmp);
      aff_combination_add (addr, &tmp);
    }

  aff_combination_const (&tmp, sizetype, bitpos / BITS_PER_UNIT);
  aff_combination_add (addr, &tmp);

  *size = (bitsize + BITS_PER_UNIT - 1) / BITS_PER_UNIT;

  return base;
}
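
/* Usage sketch for get_inner_reference_aff (illustrative): assuming a
   reference a[i].f where the array element type is 8 bytes and f sits
   at byte offset 4 within it, ADDR comes out as &a + 8*i + 4 and SIZE
   as the byte size of f; the returned base is the innermost containing
   object, here the array a:

     aff_tree addr;
     widest_int size;
     tree base = get_inner_reference_aff (ref, &addr, &size);  */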

/* Returns true if a region of size SIZE1 at position 0 and a region of
   size SIZE2 at position DIFF cannot overlap.  */

bool
aff_comb_cannot_overlap_p (aff_tree *diff, const widest_int &size1,
                           const widest_int &size2)
{
  /* Unless the difference is a constant, we fail.  */
  if (diff->n != 0)
    return false;

  if (wi::neg_p (diff->offset))
    {
      /* The second object is before the first one, we succeed if the last
         element of the second object is before the start of the first one.  */
      return wi::neg_p (diff->offset + size2 - 1);
    }
  else
    {
      /* We succeed if the second object starts after the first one ends.  */
      return wi::les_p (size1, diff->offset);
    }
}
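
/* Usage sketch for aff_comb_cannot_overlap_p (illustrative): DIFF is
   the position of the second region minus that of the first.  With a
   constant DIFF of 16, a 16-byte region at 0 and an 8-byte region at
   16 cannot overlap, since 16 <= 16.  With DIFF == -8 and SIZE2 == 8
   the regions touch exactly, and -8 + 8 - 1 == -1 < 0, so the test
   also succeeds:

     if (aff_comb_cannot_overlap_p (&diff, size1, size2))
       ;  // the two accesses are independent  */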