/* Operations with affine combinations of trees.
   Copyright (C) 2005-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "tree-pretty-print.h"
#include "pointer-set.h"
#include "tree-affine.h"
#include "gimple.h"
#include "flags.h"
#include "dumpfile.h"
#include "wide-int-print.h"

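/* An aff_tree (see tree-affine.h for the authoritative definition)
   describes a value as the affine combination

     offset + elts[0].coef * elts[0].val + ... + elts[n-1].coef * elts[n-1].val
            + rest,

   where the offset and the coefficients are widest_ints, the vals are
   trees, and REST collects any terms that do not fit in the MAX_AFF_ELTS
   element slots.  For example (illustrative), 2*x + 3*y + 7 is held as
   offset = 7 with the two elements { 2, x } and { 3, y }.  */
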
/* Extends CST as appropriate for the affine combination COMB.  */

widest_int
wide_int_ext_for_comb (const widest_int &cst, aff_tree *comb)
{
  return wi::sext (cst, TYPE_PRECISION (comb->type));
}

/* Initializes affine combination COMB so that its value is zero in TYPE.  */

static void
aff_combination_zero (aff_tree *comb, tree type)
{
  int i;

  comb->type = type;
  comb->offset = 0;
  comb->n = 0;
  for (i = 0; i < MAX_AFF_ELTS; i++)
    comb->elts[i].coef = 0;
  comb->rest = NULL_TREE;
}

/* Sets COMB to CST.  */

void
aff_combination_const (aff_tree *comb, tree type, const widest_int &cst)
{
  aff_combination_zero (comb, type);
  comb->offset = wide_int_ext_for_comb (cst, comb);
}

/* Sets COMB to single element ELT.  */

void
aff_combination_elt (aff_tree *comb, tree type, tree elt)
{
  aff_combination_zero (comb, type);

  comb->n = 1;
  comb->elts[0].val = elt;
  comb->elts[0].coef = 1;
}

/* Scales COMB by SCALE.  */

void
aff_combination_scale (aff_tree *comb, const widest_int &scale_in)
{
  unsigned i, j;

  widest_int scale = wide_int_ext_for_comb (scale_in, comb);
  if (scale == 1)
    return;

  if (scale == 0)
    {
      aff_combination_zero (comb, comb->type);
      return;
    }

  comb->offset = wide_int_ext_for_comb (scale * comb->offset, comb);
  for (i = 0, j = 0; i < comb->n; i++)
    {
      widest_int new_coef
        = wide_int_ext_for_comb (scale * comb->elts[i].coef, comb);
      /* A coefficient may become zero due to overflow.  Remove the zero
         elements.  */
      if (new_coef == 0)
        continue;
      comb->elts[j].coef = new_coef;
      comb->elts[j].val = comb->elts[i].val;
      j++;
    }
  comb->n = j;

  if (comb->rest)
    {
      tree type = comb->type;
      if (POINTER_TYPE_P (type))
        type = sizetype;
      if (comb->n < MAX_AFF_ELTS)
        {
          comb->elts[comb->n].coef = scale;
          comb->elts[comb->n].val = comb->rest;
          comb->rest = NULL_TREE;
          comb->n++;
        }
      else
        comb->rest = fold_build2 (MULT_EXPR, type, comb->rest,
                                  wide_int_to_tree (type, scale));
    }
}

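/* For instance (illustrative), scaling { offset = 7, 2*x } by 3 yields
   { offset = 21, 6*x }.  When COMB->rest is set and an element slot is
   free, the scaled rest is moved into the element vector with coefficient
   SCALE; otherwise it is folded into rest * SCALE as a MULT_EXPR.  */
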
/* Adds ELT * SCALE to COMB.  */

void
aff_combination_add_elt (aff_tree *comb, tree elt, const widest_int &scale_in)
{
  unsigned i;
  tree type;

  widest_int scale = wide_int_ext_for_comb (scale_in, comb);
  if (scale == 0)
    return;

  for (i = 0; i < comb->n; i++)
    if (operand_equal_p (comb->elts[i].val, elt, 0))
      {
        widest_int new_coef
          = wide_int_ext_for_comb (comb->elts[i].coef + scale, comb);
        if (new_coef != 0)
          {
            comb->elts[i].coef = new_coef;
            return;
          }

        comb->n--;
        comb->elts[i] = comb->elts[comb->n];

        if (comb->rest)
          {
            gcc_assert (comb->n == MAX_AFF_ELTS - 1);
            comb->elts[comb->n].coef = 1;
            comb->elts[comb->n].val = comb->rest;
            comb->rest = NULL_TREE;
            comb->n++;
          }
        return;
      }
  if (comb->n < MAX_AFF_ELTS)
    {
      comb->elts[comb->n].coef = scale;
      comb->elts[comb->n].val = elt;
      comb->n++;
      return;
    }

  type = comb->type;
  if (POINTER_TYPE_P (type))
    type = sizetype;

  if (scale == 1)
    elt = fold_convert (type, elt);
  else
    elt = fold_build2 (MULT_EXPR, type,
                       fold_convert (type, elt),
                       wide_int_to_tree (type, scale));

  if (comb->rest)
    comb->rest = fold_build2 (PLUS_EXPR, type, comb->rest,
                              elt);
  else
    comb->rest = elt;
}

/* Adds CST to C.  */

static void
aff_combination_add_cst (aff_tree *c, const widest_int &cst)
{
  c->offset = wide_int_ext_for_comb (c->offset + cst, c);
}

/* Adds COMB2 to COMB1.  */

void
aff_combination_add (aff_tree *comb1, aff_tree *comb2)
{
  unsigned i;

  aff_combination_add_cst (comb1, comb2->offset);
  for (i = 0; i < comb2->n; i++)
    aff_combination_add_elt (comb1, comb2->elts[i].val, comb2->elts[i].coef);
  if (comb2->rest)
    aff_combination_add_elt (comb1, comb2->rest, 1);
}

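/* Together with the constructors above, this allows combinations to be
   composed piecewise.  A minimal sketch (illustrative only; X and its
   integral TYPE are assumed to be in the caller's scope):

     aff_tree a, b;
     aff_combination_elt (&a, type, x);        a = x
     aff_combination_scale (&a, 3);            a = 3*x
     aff_combination_const (&b, type, 7);      b = 7
     aff_combination_add (&a, &b);             a = 3*x + 7  */
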
/* Converts affine combination COMB to TYPE.  */

void
aff_combination_convert (aff_tree *comb, tree type)
{
  unsigned i, j;
  tree comb_type = comb->type;

  if (TYPE_PRECISION (type) > TYPE_PRECISION (comb_type))
    {
      tree val = fold_convert (type, aff_combination_to_tree (comb));
      tree_to_aff_combination (val, type, comb);
      return;
    }

  comb->type = type;
  if (comb->rest && !POINTER_TYPE_P (type))
    comb->rest = fold_convert (type, comb->rest);

  if (TYPE_PRECISION (type) == TYPE_PRECISION (comb_type))
    return;

  comb->offset = wide_int_ext_for_comb (comb->offset, comb);
  for (i = j = 0; i < comb->n; i++)
    {
      if (comb->elts[i].coef == 0)
        continue;
      comb->elts[j].coef = comb->elts[i].coef;
      comb->elts[j].val = fold_convert (type, comb->elts[i].val);
      j++;
    }

  comb->n = j;
  if (comb->n < MAX_AFF_ELTS && comb->rest)
    {
      comb->elts[comb->n].coef = 1;
      comb->elts[comb->n].val = comb->rest;
      comb->rest = NULL_TREE;
      comb->n++;
    }
}

/* Splits EXPR into an affine combination of parts.  */

void
tree_to_aff_combination (tree expr, tree type, aff_tree *comb)
{
  aff_tree tmp;
  enum tree_code code;
  tree cst, core, toffset;
  HOST_WIDE_INT bitpos, bitsize;
  enum machine_mode mode;
  int unsignedp, volatilep;

  STRIP_NOPS (expr);

  code = TREE_CODE (expr);
  switch (code)
    {
    case INTEGER_CST:
      aff_combination_const (comb, type, wi::to_widest (expr));
      return;

    case POINTER_PLUS_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
      aff_combination_add (comb, &tmp);
      return;

    case PLUS_EXPR:
    case MINUS_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), type, &tmp);
      if (code == MINUS_EXPR)
        aff_combination_scale (&tmp, -1);
      aff_combination_add (comb, &tmp);
      return;

    case MULT_EXPR:
      cst = TREE_OPERAND (expr, 1);
      if (TREE_CODE (cst) != INTEGER_CST)
        break;
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, wi::to_widest (cst));
      return;

    case NEGATE_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, -1);
      return;

    case BIT_NOT_EXPR:
      /* ~x = -x - 1 */
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, -1);
      aff_combination_add_cst (comb, -1);
      return;

    case ADDR_EXPR:
      /* Handle &MEM[ptr + CST] which is equivalent to POINTER_PLUS_EXPR.  */
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == MEM_REF)
        {
          expr = TREE_OPERAND (expr, 0);
          tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
          tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
          aff_combination_add (comb, &tmp);
          return;
        }
      core = get_inner_reference (TREE_OPERAND (expr, 0), &bitsize, &bitpos,
                                  &toffset, &mode, &unsignedp, &volatilep,
                                  false);
      if (bitpos % BITS_PER_UNIT != 0)
        break;
      aff_combination_const (comb, type, bitpos / BITS_PER_UNIT);
      core = build_fold_addr_expr (core);
      if (TREE_CODE (core) == ADDR_EXPR)
        aff_combination_add_elt (comb, core, 1);
      else
        {
          tree_to_aff_combination (core, type, &tmp);
          aff_combination_add (comb, &tmp);
        }
      if (toffset)
        {
          tree_to_aff_combination (toffset, type, &tmp);
          aff_combination_add (comb, &tmp);
        }
      return;

    case MEM_REF:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR)
        tree_to_aff_combination (TREE_OPERAND (TREE_OPERAND (expr, 0), 0),
                                 type, comb);
      else if (integer_zerop (TREE_OPERAND (expr, 1)))
        {
          aff_combination_elt (comb, type, expr);
          return;
        }
      else
        aff_combination_elt (comb, type,
                             build2 (MEM_REF, TREE_TYPE (expr),
                                     TREE_OPERAND (expr, 0),
                                     build_int_cst
                                      (TREE_TYPE (TREE_OPERAND (expr, 1)), 0)));
      tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
      aff_combination_add (comb, &tmp);
      return;

    default:
      break;
    }

  aff_combination_elt (comb, type, expr);
}

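/* As a worked example (illustrative), applying tree_to_aff_combination to
   the expression a - (b + 5) yields { offset = -5, 1*a, -1*b }: the
   MINUS_EXPR case converts both operands and scales the second one by -1
   before adding it, so the nested PLUS_EXPR contributes -b and -5.  */
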
/* Creates EXPR + ELT * SCALE in TYPE.  EXPR is taken from affine
   combination COMB.  */

static tree
add_elt_to_tree (tree expr, tree type, tree elt, const widest_int &scale_in,
                 aff_tree *comb ATTRIBUTE_UNUSED)
{
  enum tree_code code;
  tree type1 = type;
  if (POINTER_TYPE_P (type))
    type1 = sizetype;

  widest_int scale = wide_int_ext_for_comb (scale_in, comb);

  if (scale == -1
      && POINTER_TYPE_P (TREE_TYPE (elt)))
    {
      elt = convert_to_ptrofftype (elt);
      elt = fold_build1 (NEGATE_EXPR, TREE_TYPE (elt), elt);
      scale = 1;
    }

  if (scale == 1)
    {
      if (!expr)
        {
          if (POINTER_TYPE_P (TREE_TYPE (elt)))
            return elt;
          else
            return fold_convert (type1, elt);
        }

      if (POINTER_TYPE_P (TREE_TYPE (expr)))
        return fold_build_pointer_plus (expr, elt);
      if (POINTER_TYPE_P (TREE_TYPE (elt)))
        return fold_build_pointer_plus (elt, expr);
      return fold_build2 (PLUS_EXPR, type1,
                          expr, fold_convert (type1, elt));
    }

  if (scale == -1)
    {
      if (!expr)
        return fold_build1 (NEGATE_EXPR, type1,
                            fold_convert (type1, elt));

      if (POINTER_TYPE_P (TREE_TYPE (expr)))
        {
          elt = convert_to_ptrofftype (elt);
          elt = fold_build1 (NEGATE_EXPR, TREE_TYPE (elt), elt);
          return fold_build_pointer_plus (expr, elt);
        }
      return fold_build2 (MINUS_EXPR, type1,
                          expr, fold_convert (type1, elt));
    }

  elt = fold_convert (type1, elt);
  if (!expr)
    return fold_build2 (MULT_EXPR, type1, elt,
                        wide_int_to_tree (type1, scale));

  if (wi::neg_p (scale))
    {
      code = MINUS_EXPR;
      scale = -scale;
    }
  else
    code = PLUS_EXPR;

  elt = fold_build2 (MULT_EXPR, type1, elt,
                     wide_int_to_tree (type1, scale));
  if (POINTER_TYPE_P (TREE_TYPE (expr)))
    {
      if (code == MINUS_EXPR)
        elt = fold_build1 (NEGATE_EXPR, type1, elt);
      return fold_build_pointer_plus (expr, elt);
    }
  return fold_build2 (code, type1, expr, elt);
}

/* Makes tree from the affine combination COMB.  */

tree
aff_combination_to_tree (aff_tree *comb)
{
  tree type = comb->type;
  tree expr = NULL_TREE;
  unsigned i;
  widest_int off, sgn;
  tree type1 = type;
  if (POINTER_TYPE_P (type))
    type1 = sizetype;

  gcc_assert (comb->n == MAX_AFF_ELTS || comb->rest == NULL_TREE);

  for (i = 0; i < comb->n; i++)
    expr = add_elt_to_tree (expr, type, comb->elts[i].val, comb->elts[i].coef,
                            comb);

  if (comb->rest)
    expr = add_elt_to_tree (expr, type, comb->rest, 1, comb);

  /* Ensure that we get x - 1, not x + (-1) or x + 0xff..f if x is
     unsigned.  */
  if (wi::neg_p (comb->offset))
    {
      off = -comb->offset;
      sgn = -1;
    }
  else
    {
      off = comb->offset;
      sgn = 1;
    }
  return add_elt_to_tree (expr, type, wide_int_to_tree (type1, off), sgn,
                          comb);
}

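/* aff_combination_to_tree rebuilds a tree computing the same value that
   tree_to_aff_combination decomposed, so a typical pattern is parse,
   manipulate, rebuild.  A sketch (EXPR and TYPE assumed in scope):

     aff_tree c;
     tree_to_aff_combination (expr, type, &c);
     aff_combination_scale (&c, -1);
     expr = aff_combination_to_tree (&c);      expr now computes -value  */
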
/* Copies the tree elements of COMB to ensure that they are not shared.  */

void
unshare_aff_combination (aff_tree *comb)
{
  unsigned i;

  for (i = 0; i < comb->n; i++)
    comb->elts[i].val = unshare_expr (comb->elts[i].val);
  if (comb->rest)
    comb->rest = unshare_expr (comb->rest);
}

/* Remove M-th element from COMB.  */

void
aff_combination_remove_elt (aff_tree *comb, unsigned m)
{
  comb->n--;
  if (m <= comb->n)
    comb->elts[m] = comb->elts[comb->n];
  if (comb->rest)
    {
      comb->elts[comb->n].coef = 1;
      comb->elts[comb->n].val = comb->rest;
      comb->rest = NULL_TREE;
      comb->n++;
    }
}

/* Adds C * COEF * VAL to R.  VAL may be NULL, in that case only
   C * COEF is added to R.  */

static void
aff_combination_add_product (aff_tree *c, const widest_int &coef, tree val,
                             aff_tree *r)
{
  unsigned i;
  tree aval, type;

  for (i = 0; i < c->n; i++)
    {
      aval = c->elts[i].val;
      if (val)
        {
          type = TREE_TYPE (aval);
          aval = fold_build2 (MULT_EXPR, type, aval,
                              fold_convert (type, val));
        }

      aff_combination_add_elt (r, aval, coef * c->elts[i].coef);
    }

  if (c->rest)
    {
      aval = c->rest;
      if (val)
        {
          type = TREE_TYPE (aval);
          aval = fold_build2 (MULT_EXPR, type, aval,
                              fold_convert (type, val));
        }

      aff_combination_add_elt (r, aval, coef);
    }

  if (val)
    aff_combination_add_elt (r, val, coef * c->offset);
  else
    aff_combination_add_cst (r, coef * c->offset);
}

/* Multiplies C1 by C2, storing the result to R.  */

void
aff_combination_mult (aff_tree *c1, aff_tree *c2, aff_tree *r)
{
  unsigned i;

  gcc_assert (TYPE_PRECISION (c1->type) == TYPE_PRECISION (c2->type));
  aff_combination_zero (r, c1->type);

  for (i = 0; i < c2->n; i++)
    aff_combination_add_product (c1, c2->elts[i].coef, c2->elts[i].val, r);
  if (c2->rest)
    aff_combination_add_product (c1, 1, c2->rest, r);
  aff_combination_add_product (c1, c2->offset, NULL, r);
}

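/* As a worked example (illustrative), multiplying c1 = x + 1 by
   c2 = 2*y + 3 distributes term by term via aff_combination_add_product:

     2*y * (x + 1)  adds  2*(x*y) + 2*y
     3   * (x + 1)  adds  3*x + 3

   so r = 2*(x*y) + 2*y + 3*x + 3, where x*y is built as a MULT_EXPR tree
   and enters R as a single element.  */
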
/* Returns the element of COMB whose value is VAL, or NULL if no such
   element exists.  If IDX is not NULL, it is set to the index of VAL in
   COMB.  */

static struct aff_comb_elt *
aff_combination_find_elt (aff_tree *comb, tree val, unsigned *idx)
{
  unsigned i;

  for (i = 0; i < comb->n; i++)
    if (operand_equal_p (comb->elts[i].val, val, 0))
      {
        if (idx)
          *idx = i;

        return &comb->elts[i];
      }

  return NULL;
}

/* Element of the cache that maps ssa name NAME to its expanded form
   as an affine expression EXPANSION.  */

struct name_expansion
{
  aff_tree expansion;

  /* True if the expansion for the name is just being generated.  */
  unsigned in_progress : 1;
};

/* Expands SSA names in COMB recursively.  CACHE is used to cache the
   results.  */

void
aff_combination_expand (aff_tree *comb ATTRIBUTE_UNUSED,
                        struct pointer_map_t **cache ATTRIBUTE_UNUSED)
{
  unsigned i;
  aff_tree to_add, current, curre;
  tree e, rhs;
  gimple def;
  widest_int scale;
  void **slot;
  struct name_expansion *exp;

  aff_combination_zero (&to_add, comb->type);
  for (i = 0; i < comb->n; i++)
    {
      tree type, name;
      enum tree_code code;

      e = comb->elts[i].val;
      type = TREE_TYPE (e);
      name = e;
      /* Look through some conversions.  */
      if (TREE_CODE (e) == NOP_EXPR
          && (TYPE_PRECISION (type)
              >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (e, 0)))))
        name = TREE_OPERAND (e, 0);
      if (TREE_CODE (name) != SSA_NAME)
        continue;
      def = SSA_NAME_DEF_STMT (name);
      if (!is_gimple_assign (def) || gimple_assign_lhs (def) != name)
        continue;

      code = gimple_assign_rhs_code (def);
      if (code != SSA_NAME
          && !IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
          && (get_gimple_rhs_class (code) != GIMPLE_SINGLE_RHS
              || !is_gimple_min_invariant (gimple_assign_rhs1 (def))))
        continue;

      /* We do not know whether the reference retains its value at the
         place where the expansion is used.  */
      if (TREE_CODE_CLASS (code) == tcc_reference)
        continue;

      if (!*cache)
        *cache = pointer_map_create ();
      slot = pointer_map_insert (*cache, e);
      exp = (struct name_expansion *) *slot;

      if (!exp)
        {
          exp = XNEW (struct name_expansion);
          exp->in_progress = 1;
          *slot = exp;
          /* In principle this is a generally valid folding, but
             it is not unconditionally an optimization, so do it
             here and not in fold_unary.  */
          /* Convert (T1)(X *+- CST) into (T1)X *+- (T1)CST if T1 is wider
             than the type of X and overflow for the type of X is
             undefined.  */
          if (e != name
              && INTEGRAL_TYPE_P (type)
              && INTEGRAL_TYPE_P (TREE_TYPE (name))
              && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (name))
              && TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (name))
              && (code == PLUS_EXPR || code == MINUS_EXPR || code == MULT_EXPR)
              && TREE_CODE (gimple_assign_rhs2 (def)) == INTEGER_CST)
            rhs = fold_build2 (code, type,
                               fold_convert (type, gimple_assign_rhs1 (def)),
                               fold_convert (type, gimple_assign_rhs2 (def)));
          else
            {
              rhs = gimple_assign_rhs_to_tree (def);
              if (e != name)
                rhs = fold_convert (type, rhs);
            }
          tree_to_aff_combination_expand (rhs, comb->type, &current, cache);
          exp->expansion = current;
          exp->in_progress = 0;
        }
      else
        {
          /* Since we follow the definitions in the SSA form, we should not
             enter a cycle unless we pass through a phi node.  */
          gcc_assert (!exp->in_progress);
          current = exp->expansion;
        }

      /* Accumulate the new terms to TO_ADD, so that we do not modify
         COMB while traversing it; include the term -coef * E, to remove
         it from COMB.  */
      scale = comb->elts[i].coef;
      aff_combination_zero (&curre, comb->type);
      aff_combination_add_elt (&curre, e, -scale);
      aff_combination_scale (&current, scale);
      aff_combination_add (&to_add, &current);
      aff_combination_add (&to_add, &curre);
    }
  aff_combination_add (comb, &to_add);
}

/* Similar to tree_to_aff_combination, but follows SSA name definitions
   and expands them recursively.  CACHE is used to cache the expansions
   of the ssa names, to avoid exponential time complexity for cases
   like

   a1 = a0 + a0;
   a2 = a1 + a1;
   a3 = a2 + a2;
   ...  */

void
tree_to_aff_combination_expand (tree expr, tree type, aff_tree *comb,
                                struct pointer_map_t **cache)
{
  tree_to_aff_combination (expr, type, comb);
  aff_combination_expand (comb, cache);
}

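/* A usage sketch (illustrative; EXPR1, EXPR2 and TYPE assumed in scope):
   callers share one cache across related queries and release it afterwards
   with free_affine_expand_cache below:

     aff_tree a1, a2;
     struct pointer_map_t *cache = NULL;
     tree_to_aff_combination_expand (expr1, type, &a1, &cache);
     tree_to_aff_combination_expand (expr2, type, &a2, &cache);
     free_affine_expand_cache (&cache);  */
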
/* Frees memory occupied by struct name_expansion in *VALUE.  Callback for
   pointer_map_traverse.  */

static bool
free_name_expansion (const void *key ATTRIBUTE_UNUSED, void **value,
                     void *data ATTRIBUTE_UNUSED)
{
  struct name_expansion *const exp = (struct name_expansion *) *value;

  free (exp);
  return true;
}

/* Frees memory allocated for the CACHE used by
   tree_to_aff_combination_expand.  */

void
free_affine_expand_cache (struct pointer_map_t **cache)
{
  if (!*cache)
    return;

  pointer_map_traverse (*cache, free_name_expansion, NULL);
  pointer_map_destroy (*cache);
  *cache = NULL;
}

/* If VAL != CST * DIV for any constant CST, returns false.
   Otherwise, if *MULT_SET is true, additionally compares CST and MULT,
   and if they are different, returns false.  Finally, if neither of these
   two cases occur, true is returned, and CST is stored to MULT and MULT_SET
   is set to true.  */

static bool
wide_int_constant_multiple_p (const widest_int &val, const widest_int &div,
                              bool *mult_set, widest_int *mult)
{
  widest_int rem, cst;

  if (val == 0)
    {
      if (*mult_set && *mult != 0)
        return false;
      *mult_set = true;
      *mult = 0;
      return true;
    }

  if (div == 0)
    return false;

  if (!wi::multiple_of_p (val, div, SIGNED, &cst))
    return false;

  if (*mult_set && *mult != cst)
    return false;

  *mult_set = true;
  *mult = cst;
  return true;
}

/* Returns true if VAL = X * DIV for some constant X.  If this is the case,
   X is stored to MULT.  */

bool
aff_combination_constant_multiple_p (aff_tree *val, aff_tree *div,
                                     widest_int *mult)
{
  bool mult_set = false;
  unsigned i;

  if (val->n == 0 && val->offset == 0)
    {
      *mult = 0;
      return true;
    }

  if (val->n != div->n)
    return false;

  if (val->rest || div->rest)
    return false;

  if (!wide_int_constant_multiple_p (val->offset, div->offset,
                                     &mult_set, mult))
    return false;

  for (i = 0; i < div->n; i++)
    {
      struct aff_comb_elt *elt
        = aff_combination_find_elt (val, div->elts[i].val, NULL);
      if (!elt)
        return false;
      if (!wide_int_constant_multiple_p (elt->coef, div->elts[i].coef,
                                         &mult_set, mult))
        return false;
    }

  gcc_assert (mult_set);
  return true;
}

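/* For example (illustrative), val = { offset = 8, 4*x } against
   div = { offset = 4, 2*x } yields the consistent factor 2, so true is
   returned with *MULT = 2; val = { offset = 6, 4*x } fails already on the
   offsets, since 6 is not a signed multiple of 4.  */
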
/* Prints the affine VAL to the FILE.  */

static void
print_aff (FILE *file, aff_tree *val)
{
  unsigned i;
  signop sgn = TYPE_SIGN (val->type);
  if (POINTER_TYPE_P (val->type))
    sgn = SIGNED;
  fprintf (file, "{\n  type = ");
  print_generic_expr (file, val->type, TDF_VOPS|TDF_MEMSYMS);
  fprintf (file, "\n  offset = ");
  print_dec (val->offset, file, sgn);
  if (val->n > 0)
    {
      fprintf (file, "\n  elements = {\n");
      for (i = 0; i < val->n; i++)
        {
          fprintf (file, "    [%d] = ", i);
          print_generic_expr (file, val->elts[i].val, TDF_VOPS|TDF_MEMSYMS);

          fprintf (file, " * ");
          print_dec (val->elts[i].coef, file, sgn);
          if (i != val->n - 1)
            fprintf (file, ", \n");
        }
      fprintf (file, "\n  }");
    }
  if (val->rest)
    {
      fprintf (file, "\n  rest = ");
      print_generic_expr (file, val->rest, TDF_VOPS|TDF_MEMSYMS);
    }
  fprintf (file, "\n}");
}

/* Prints the affine VAL to the standard error, used for debugging.  */

DEBUG_FUNCTION void
debug_aff (aff_tree *val)
{
  print_aff (stderr, val);
  fprintf (stderr, "\n");
}

/* Returns address of the reference REF in ADDR.  The size of the accessed
   location is stored to SIZE.  */

void
get_inner_reference_aff (tree ref, aff_tree *addr, widest_int *size)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree toff;
  enum machine_mode mode;
  int uns, vol;
  aff_tree tmp;
  tree base = get_inner_reference (ref, &bitsize, &bitpos, &toff, &mode,
                                   &uns, &vol, false);
  tree base_addr = build_fold_addr_expr (base);

  /* ADDR = &BASE + TOFF + BITPOS / BITS_PER_UNIT.  */

  tree_to_aff_combination (base_addr, sizetype, addr);

  if (toff)
    {
      tree_to_aff_combination (toff, sizetype, &tmp);
      aff_combination_add (addr, &tmp);
    }

  aff_combination_const (&tmp, sizetype, bitpos / BITS_PER_UNIT);
  aff_combination_add (addr, &tmp);

  *size = (bitsize + BITS_PER_UNIT - 1) / BITS_PER_UNIT;
}

/* Returns true if a region of size SIZE1 at position 0 and a region of
   size SIZE2 at position DIFF cannot overlap.  */

bool
aff_comb_cannot_overlap_p (aff_tree *diff, const widest_int &size1,
                           const widest_int &size2)
{
  /* Unless the difference is a constant, we fail.  */
  if (diff->n != 0)
    return false;

  if (wi::neg_p (diff->offset))
    {
      /* The second object is before the first one, we succeed if the last
         element of the second object is before the start of the first one.  */
      return wi::neg_p (diff->offset + size2 - 1);
    }
  else
    {
      /* We succeed if the second object starts after the first one ends.  */
      return wi::les_p (size1, diff->offset);
    }
}
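
/* For example (illustrative), with diff->offset = 16, size1 = 8 and
   size2 = 8 the second region starts at or beyond the end of the first
   (16 >= 8), so the function returns true; with diff->offset = -4 and
   size2 = 8 the second region spans [-4, 3] and reaches position 0, so
   it returns false.  */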