/* Operations with affine combinations of trees.
   Copyright (C) 2005-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "input.h"
#include "alias.h"
#include "symtab.h"
#include "options.h"
#include "tree.h"
#include "fold-const.h"
#include "tm.h"
#include "hard-reg-set.h"
#include "function.h"
#include "rtl.h"
#include "flags.h"
#include "insn-config.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "emit-rtl.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "tree-pretty-print.h"
#include "tree-affine.h"
#include "predict.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimplify.h"
#include "dumpfile.h"
#include "cfgexpand.h"
#include "wide-int-print.h"
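
/* Notation used below: an affine combination COMB of type TYPE stands for
   the value

     COMB.offset + COMB.elts[0].coef * COMB.elts[0].val
		 + ...
		 + COMB.elts[n-1].coef * COMB.elts[n-1].val
		 + COMB.rest

   where the offset and the coefficients are widest_ints interpreted in the
   precision of TYPE, the values are trees, and REST (if non-NULL) collects,
   with an implicit coefficient of 1, whatever did not fit into the
   MAX_AFF_ELTS tracked elements.  For instance, a + 3 * b + 7 is represented
   with offset 7 and the elements {a, 1} and {b, 3}.  */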
/* Extends CST as appropriate for the affine combinations COMB.  */

widest_int
wide_int_ext_for_comb (const widest_int &cst, aff_tree *comb)
{
  return wi::sext (cst, TYPE_PRECISION (comb->type));
}
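
/* For instance, for a combination of 8-bit precision, the constant 0xff is
   sign-extended to -1, so that subsequent widest_int arithmetic on offsets
   and coefficients matches two's complement arithmetic in the combination's
   type.  */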
/* Initializes affine combination COMB so that its value is zero in TYPE.  */

static void
aff_combination_zero (aff_tree *comb, tree type)
{
  int i;

  comb->type = type;
  comb->offset = 0;
  comb->n = 0;
  for (i = 0; i < MAX_AFF_ELTS; i++)
    comb->elts[i].coef = 0;
  comb->rest = NULL_TREE;
}
/* Sets COMB to CST.  */

void
aff_combination_const (aff_tree *comb, tree type, const widest_int &cst)
{
  aff_combination_zero (comb, type);
  comb->offset = wide_int_ext_for_comb (cst, comb);
}
/* Sets COMB to single element ELT.  */

void
aff_combination_elt (aff_tree *comb, tree type, tree elt)
{
  aff_combination_zero (comb, type);

  comb->n = 1;
  comb->elts[0].val = elt;
  comb->elts[0].coef = 1;
}
/* Scales COMB by SCALE.  */

void
aff_combination_scale (aff_tree *comb, const widest_int &scale_in)
{
  unsigned i, j;

  widest_int scale = wide_int_ext_for_comb (scale_in, comb);
  if (scale == 1)
    return;

  if (scale == 0)
    {
      aff_combination_zero (comb, comb->type);
      return;
    }

  comb->offset = wide_int_ext_for_comb (scale * comb->offset, comb);
  for (i = 0, j = 0; i < comb->n; i++)
    {
      widest_int new_coef
	= wide_int_ext_for_comb (scale * comb->elts[i].coef, comb);
      /* A coefficient may become zero due to overflow.  Remove the zero
	 elements.  */
      if (new_coef == 0)
	continue;
      comb->elts[j].coef = new_coef;
      comb->elts[j].val = comb->elts[i].val;
      j++;
    }
  comb->n = j;

  if (comb->rest)
    {
      tree type = comb->type;
      if (POINTER_TYPE_P (type))
	type = sizetype;
      if (comb->n < MAX_AFF_ELTS)
	{
	  comb->elts[comb->n].coef = scale;
	  comb->elts[comb->n].val = comb->rest;
	  comb->rest = NULL_TREE;
	  comb->n++;
	}
      else
	comb->rest = fold_build2 (MULT_EXPR, type, comb->rest,
				  wide_int_to_tree (type, scale));
    }
}
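
/* For instance, scaling 7 + 3*x by -2 yields -14 - 6*x.  In a type of small
   precision a coefficient may also wrap around to zero, in which case the
   element is dropped from ELTS, keeping the combination canonical.  */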
/* Adds ELT * SCALE to COMB.  */

void
aff_combination_add_elt (aff_tree *comb, tree elt, const widest_int &scale_in)
{
  unsigned i;
  tree type;

  widest_int scale = wide_int_ext_for_comb (scale_in, comb);
  if (scale == 0)
    return;

  for (i = 0; i < comb->n; i++)
    if (operand_equal_p (comb->elts[i].val, elt, 0))
      {
	widest_int new_coef
	  = wide_int_ext_for_comb (comb->elts[i].coef + scale, comb);
	if (new_coef != 0)
	  {
	    comb->elts[i].coef = new_coef;
	    return;
	  }

	comb->n--;
	comb->elts[i] = comb->elts[comb->n];

	if (comb->rest)
	  {
	    gcc_assert (comb->n == MAX_AFF_ELTS - 1);
	    comb->elts[comb->n].coef = 1;
	    comb->elts[comb->n].val = comb->rest;
	    comb->rest = NULL_TREE;
	    comb->n++;
	  }
	return;
      }
  if (comb->n < MAX_AFF_ELTS)
    {
      comb->elts[comb->n].coef = scale;
      comb->elts[comb->n].val = elt;
      comb->n++;
      return;
    }

  type = comb->type;
  if (POINTER_TYPE_P (type))
    type = sizetype;

  if (scale == 1)
    elt = fold_convert (type, elt);
  else
    elt = fold_build2 (MULT_EXPR, type,
		       fold_convert (type, elt),
		       wide_int_to_tree (type, scale));

  if (comb->rest)
    comb->rest = fold_build2 (PLUS_EXPR, type, comb->rest,
			      elt);
  else
    comb->rest = elt;
}
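
/* For instance, adding x with scale 2 to 7 + 3*x yields 7 + 5*x, while
   adding x with scale -3 cancels the element entirely.  Once MAX_AFF_ELTS
   distinct values are already tracked, the new term is instead folded into
   REST as an ordinary tree.  */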
/* Adds CST to C.  */

static void
aff_combination_add_cst (aff_tree *c, const widest_int &cst)
{
  c->offset = wide_int_ext_for_comb (c->offset + cst, c);
}
/* Adds COMB2 to COMB1.  */

void
aff_combination_add (aff_tree *comb1, aff_tree *comb2)
{
  unsigned i;

  aff_combination_add_cst (comb1, comb2->offset);
  for (i = 0; i < comb2->n; i++)
    aff_combination_add_elt (comb1, comb2->elts[i].val, comb2->elts[i].coef);
  if (comb2->rest)
    aff_combination_add_elt (comb1, comb2->rest, 1);
}
/* Converts affine combination COMB to TYPE.  */

void
aff_combination_convert (aff_tree *comb, tree type)
{
  unsigned i, j;
  tree comb_type = comb->type;

  if (TYPE_PRECISION (type) > TYPE_PRECISION (comb_type))
    {
      tree val = fold_convert (type, aff_combination_to_tree (comb));
      tree_to_aff_combination (val, type, comb);
      return;
    }

  comb->type = type;
  if (comb->rest && !POINTER_TYPE_P (type))
    comb->rest = fold_convert (type, comb->rest);

  if (TYPE_PRECISION (type) == TYPE_PRECISION (comb_type))
    return;

  comb->offset = wide_int_ext_for_comb (comb->offset, comb);
  for (i = j = 0; i < comb->n; i++)
    {
      if (comb->elts[i].coef == 0)
	continue;
      comb->elts[j].coef = comb->elts[i].coef;
      comb->elts[j].val = fold_convert (type, comb->elts[i].val);
      j++;
    }

  comb->n = j;
  if (comb->n < MAX_AFF_ELTS && comb->rest)
    {
      comb->elts[comb->n].coef = 1;
      comb->elts[comb->n].val = comb->rest;
      comb->rest = NULL_TREE;
      comb->n++;
    }
}
/* Splits EXPR into an affine combination of parts.  */

void
tree_to_aff_combination (tree expr, tree type, aff_tree *comb)
{
  aff_tree tmp;
  enum tree_code code;
  tree cst, core, toffset;
  HOST_WIDE_INT bitpos, bitsize;
  machine_mode mode;
  int unsignedp, reversep, volatilep;

  STRIP_NOPS (expr);

  code = TREE_CODE (expr);
  switch (code)
    {
    case INTEGER_CST:
      aff_combination_const (comb, type, wi::to_widest (expr));
      return;

    case POINTER_PLUS_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
      aff_combination_add (comb, &tmp);
      return;

    case PLUS_EXPR:
    case MINUS_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), type, &tmp);
      if (code == MINUS_EXPR)
	aff_combination_scale (&tmp, -1);
      aff_combination_add (comb, &tmp);
      return;

    case MULT_EXPR:
      cst = TREE_OPERAND (expr, 1);
      if (TREE_CODE (cst) != INTEGER_CST)
	break;
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, wi::to_widest (cst));
      return;

    case NEGATE_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, -1);
      return;

    case BIT_NOT_EXPR:
      /* ~x = -x - 1  */
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, -1);
      aff_combination_add_cst (comb, -1);
      return;

    case ADDR_EXPR:
      /* Handle &MEM[ptr + CST] which is equivalent to POINTER_PLUS_EXPR.  */
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == MEM_REF)
	{
	  expr = TREE_OPERAND (expr, 0);
	  tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
	  tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
	  aff_combination_add (comb, &tmp);
	  return;
	}
      core = get_inner_reference (TREE_OPERAND (expr, 0), &bitsize, &bitpos,
				  &toffset, &mode, &unsignedp, &reversep,
				  &volatilep, false);
      if (bitpos % BITS_PER_UNIT != 0)
	break;
      aff_combination_const (comb, type, bitpos / BITS_PER_UNIT);
      if (TREE_CODE (core) == MEM_REF)
	{
	  aff_combination_add_cst (comb, wi::to_widest (TREE_OPERAND (core, 1)));
	  core = TREE_OPERAND (core, 0);
	}
      else
	core = build_fold_addr_expr (core);

      if (TREE_CODE (core) == ADDR_EXPR)
	aff_combination_add_elt (comb, core, 1);
      else
	{
	  tree_to_aff_combination (core, type, &tmp);
	  aff_combination_add (comb, &tmp);
	}
      if (toffset)
	{
	  tree_to_aff_combination (toffset, type, &tmp);
	  aff_combination_add (comb, &tmp);
	}
      return;

    case MEM_REF:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR)
	tree_to_aff_combination (TREE_OPERAND (TREE_OPERAND (expr, 0), 0),
				 type, comb);
      else if (integer_zerop (TREE_OPERAND (expr, 1)))
	{
	  aff_combination_elt (comb, type, expr);
	  return;
	}
      else
	aff_combination_elt (comb, type,
			     build2 (MEM_REF, TREE_TYPE (expr),
				     TREE_OPERAND (expr, 0),
				     build_int_cst
				       (TREE_TYPE (TREE_OPERAND (expr, 1)), 0)));
      tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
      aff_combination_add (comb, &tmp);
      return;

    default:
      break;
    }

  aff_combination_elt (comb, type, expr);
}
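
/* For instance, (a + 2) * 3 - a is split into the combination 6 + 2*a:
   the MULT_EXPR case distributes the constant factor over the operand's
   combination, and the MINUS_EXPR case adds the subtrahend scaled by -1.
   Anything that cannot be decomposed further becomes a single element with
   coefficient 1.  */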
/* Creates EXPR + ELT * SCALE in TYPE.  EXPR is taken from affine
   combination COMB.  */

static tree
add_elt_to_tree (tree expr, tree type, tree elt, const widest_int &scale_in,
		 aff_tree *comb ATTRIBUTE_UNUSED)
{
  enum tree_code code;
  tree type1 = type;
  if (POINTER_TYPE_P (type))
    type1 = sizetype;

  widest_int scale = wide_int_ext_for_comb (scale_in, comb);

  if (scale == -1
      && POINTER_TYPE_P (TREE_TYPE (elt)))
    {
      elt = convert_to_ptrofftype (elt);
      elt = fold_build1 (NEGATE_EXPR, TREE_TYPE (elt), elt);
      scale = 1;
    }

  if (scale == 1)
    {
      if (!expr)
	{
	  if (POINTER_TYPE_P (TREE_TYPE (elt)))
	    return elt;
	  else
	    return fold_convert (type1, elt);
	}

      if (POINTER_TYPE_P (TREE_TYPE (expr)))
	return fold_build_pointer_plus (expr, elt);
      if (POINTER_TYPE_P (TREE_TYPE (elt)))
	return fold_build_pointer_plus (elt, expr);
      return fold_build2 (PLUS_EXPR, type1,
			  expr, fold_convert (type1, elt));
    }

  if (scale == -1)
    {
      if (!expr)
	return fold_build1 (NEGATE_EXPR, type1,
			    fold_convert (type1, elt));

      if (POINTER_TYPE_P (TREE_TYPE (expr)))
	{
	  elt = convert_to_ptrofftype (elt);
	  elt = fold_build1 (NEGATE_EXPR, TREE_TYPE (elt), elt);
	  return fold_build_pointer_plus (expr, elt);
	}
      return fold_build2 (MINUS_EXPR, type1,
			  expr, fold_convert (type1, elt));
    }

  elt = fold_convert (type1, elt);
  if (!expr)
    return fold_build2 (MULT_EXPR, type1, elt,
			wide_int_to_tree (type1, scale));

  if (wi::neg_p (scale))
    {
      code = MINUS_EXPR;
      scale = -scale;
    }
  else
    code = PLUS_EXPR;

  elt = fold_build2 (MULT_EXPR, type1, elt,
		     wide_int_to_tree (type1, scale));
  if (POINTER_TYPE_P (TREE_TYPE (expr)))
    {
      if (code == MINUS_EXPR)
	elt = fold_build1 (NEGATE_EXPR, type1, elt);
      return fold_build_pointer_plus (expr, elt);
    }
  return fold_build2 (code, type1, expr, elt);
}
/* Makes tree from the affine combination COMB.  */

tree
aff_combination_to_tree (aff_tree *comb)
{
  tree type = comb->type;
  tree expr = NULL_TREE;
  unsigned i;
  widest_int off, sgn;
  tree type1 = type;
  if (POINTER_TYPE_P (type))
    type1 = sizetype;

  gcc_assert (comb->n == MAX_AFF_ELTS || comb->rest == NULL_TREE);

  for (i = 0; i < comb->n; i++)
    expr = add_elt_to_tree (expr, type, comb->elts[i].val, comb->elts[i].coef,
			    comb);

  if (comb->rest)
    expr = add_elt_to_tree (expr, type, comb->rest, 1, comb);

  /* Ensure that we get x - 1, not x + (-1) or x + 0xff..f if x is
     unsigned.  */
  if (wi::neg_p (comb->offset))
    {
      off = -comb->offset;
      sgn = -1;
    }
  else
    {
      off = comb->offset;
      sgn = 1;
    }
  return add_elt_to_tree (expr, type, wide_int_to_tree (type1, off), sgn,
			  comb);
}
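
/* Together with tree_to_aff_combination, this provides a round trip
   tree -> aff_tree -> tree; the result is an equivalent but possibly
   re-associated expression, e.g. 6 + 2*a may come back as a * 2 + 6, and a
   negative offset is emitted as a subtraction.  */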
/* Copies the tree elements of COMB to ensure that they are not shared.  */

void
unshare_aff_combination (aff_tree *comb)
{
  unsigned i;

  for (i = 0; i < comb->n; i++)
    comb->elts[i].val = unshare_expr (comb->elts[i].val);
  if (comb->rest)
    comb->rest = unshare_expr (comb->rest);
}
/* Removes the M-th element from COMB.  */

void
aff_combination_remove_elt (aff_tree *comb, unsigned m)
{
  comb->n--;
  if (m <= comb->n)
    comb->elts[m] = comb->elts[comb->n];
  if (comb->rest)
    {
      comb->elts[comb->n].coef = 1;
      comb->elts[comb->n].val = comb->rest;
      comb->rest = NULL_TREE;
      comb->n++;
    }
}
/* Adds C * COEF * VAL to R.  VAL may be NULL, in that case only
   C * COEF is added to R.  */

static void
aff_combination_add_product (aff_tree *c, const widest_int &coef, tree val,
			     aff_tree *r)
{
  unsigned i;
  tree aval, type;

  for (i = 0; i < c->n; i++)
    {
      aval = c->elts[i].val;
      if (val)
	{
	  type = TREE_TYPE (aval);
	  aval = fold_build2 (MULT_EXPR, type, aval,
			      fold_convert (type, val));
	}

      aff_combination_add_elt (r, aval, coef * c->elts[i].coef);
    }

  if (c->rest)
    {
      aval = c->rest;
      if (val)
	{
	  type = TREE_TYPE (aval);
	  aval = fold_build2 (MULT_EXPR, type, aval,
			      fold_convert (type, val));
	}

      aff_combination_add_elt (r, aval, coef);
    }

  if (val)
    aff_combination_add_elt (r, val, coef * c->offset);
  else
    aff_combination_add_cst (r, coef * c->offset);
}
/* Multiplies C1 by C2, storing the result to R.  */

void
aff_combination_mult (aff_tree *c1, aff_tree *c2, aff_tree *r)
{
  unsigned i;
  gcc_assert (TYPE_PRECISION (c1->type) == TYPE_PRECISION (c2->type));

  aff_combination_zero (r, c1->type);

  for (i = 0; i < c2->n; i++)
    aff_combination_add_product (c1, c2->elts[i].coef, c2->elts[i].val, r);
  if (c2->rest)
    aff_combination_add_product (c1, 1, c2->rest, r);
  aff_combination_add_product (c1, c2->offset, NULL, r);
}
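
/* For instance, (x + 1) * (y + 2) multiplies out to x*y + 2*x + y + 2,
   where the product of the two non-constant elements is represented by a
   single new element whose value is the folded tree x * y.  */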
/* Returns the element of COMB whose value is VAL, or NULL if no such
   element exists.  If IDX is not NULL, it is set to the index of VAL in
   COMB.  */

static struct aff_comb_elt *
aff_combination_find_elt (aff_tree *comb, tree val, unsigned *idx)
{
  unsigned i;

  for (i = 0; i < comb->n; i++)
    if (operand_equal_p (comb->elts[i].val, val, 0))
      {
	if (idx)
	  *idx = i;

	return &comb->elts[i];
      }

  return NULL;
}
/* Element of the cache that maps ssa name NAME to its expanded form
   as an affine expression EXPANSION.  */

struct name_expansion
{
  aff_tree expansion;

  /* True if the expansion for the name is just being generated.  */
  unsigned in_progress : 1;
};
/* Expands SSA names in COMB recursively.  CACHE is used to cache the
   results.  */

void
aff_combination_expand (aff_tree *comb ATTRIBUTE_UNUSED,
			hash_map<tree, name_expansion *> **cache)
{
  unsigned i;
  aff_tree to_add, current, curre;
  tree e, rhs;
  gimple def;
  widest_int scale;
  struct name_expansion *exp;

  aff_combination_zero (&to_add, comb->type);
  for (i = 0; i < comb->n; i++)
    {
      tree type, name;
      enum tree_code code;

      e = comb->elts[i].val;
      type = TREE_TYPE (e);
      name = e;
      /* Look through some conversions.  */
      if (CONVERT_EXPR_P (e)
	  && (TYPE_PRECISION (type)
	      >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (e, 0)))))
	name = TREE_OPERAND (e, 0);
      if (TREE_CODE (name) != SSA_NAME)
	continue;
      def = SSA_NAME_DEF_STMT (name);
      if (!is_gimple_assign (def) || gimple_assign_lhs (def) != name)
	continue;

      code = gimple_assign_rhs_code (def);
      if (code != SSA_NAME
	  && !IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
	  && (get_gimple_rhs_class (code) != GIMPLE_SINGLE_RHS
	      || !is_gimple_min_invariant (gimple_assign_rhs1 (def))))
	continue;

      /* We do not know whether the reference retains its value at the
	 place where the expansion is used.  */
      if (TREE_CODE_CLASS (code) == tcc_reference)
	continue;

      if (!*cache)
	*cache = new hash_map<tree, name_expansion *>;
      name_expansion **slot = &(*cache)->get_or_insert (e);
      exp = *slot;

      if (!exp)
	{
	  exp = XNEW (struct name_expansion);
	  exp->in_progress = 1;
	  *slot = exp;
	  /* In principle this is a generally valid folding, but
	     it is not unconditionally an optimization, so do it
	     here and not in fold_unary.  */
	  /* Convert (T1)(X *+- CST) into (T1)X *+- (T1)CST if T1 is wider
	     than the type of X and overflow for the type of X is
	     undefined.  */
	  if (e != name
	      && INTEGRAL_TYPE_P (type)
	      && INTEGRAL_TYPE_P (TREE_TYPE (name))
	      && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (name))
	      && TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (name))
	      && (code == PLUS_EXPR || code == MINUS_EXPR || code == MULT_EXPR)
	      && TREE_CODE (gimple_assign_rhs2 (def)) == INTEGER_CST)
	    rhs = fold_build2 (code, type,
			       fold_convert (type, gimple_assign_rhs1 (def)),
			       fold_convert (type, gimple_assign_rhs2 (def)));
	  else
	    {
	      rhs = gimple_assign_rhs_to_tree (def);
	      if (e != name)
		rhs = fold_convert (type, rhs);
	    }
	  tree_to_aff_combination_expand (rhs, comb->type, &current, cache);
	  exp->expansion = current;
	  exp->in_progress = 0;
	}
      else
	{
	  /* Since we follow the definitions in the SSA form, we should not
	     enter a cycle unless we pass through a phi node.  */
	  gcc_assert (!exp->in_progress);
	  current = exp->expansion;
	}

      /* Accumulate the new terms to TO_ADD, so that we do not modify
	 COMB while traversing it; include the term -coef * E, to remove
	 it from COMB.  */
      scale = comb->elts[i].coef;
      aff_combination_zero (&curre, comb->type);
      aff_combination_add_elt (&curre, e, -scale);
      aff_combination_scale (&current, scale);
      aff_combination_add (&to_add, &current);
      aff_combination_add (&to_add, &curre);
    }
  aff_combination_add (comb, &to_add);
}
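
/* For instance, if COMB contains the element a_1 * 3 and a_1 is defined by
   a_1 = a_0 + x, the element is replaced by 3*a_0 + 3*x: the expansion of
   the definition is scaled by the coefficient 3 and the term -3*a_1 is
   accumulated to cancel the original element.  */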
/* Similar to tree_to_aff_combination, but follows SSA name definitions
   and expands them recursively.  CACHE is used to cache the expansions
   of the ssa names, to avoid exponential time complexity for cases
   like

   a1 = a0 + a0;
   a2 = a1 + a1;
   a3 = a2 + a2;
   ...  */

void
tree_to_aff_combination_expand (tree expr, tree type, aff_tree *comb,
				hash_map<tree, name_expansion *> **cache)
{
  tree_to_aff_combination (expr, type, comb);
  aff_combination_expand (comb, cache);
}
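
/* A typical caller keeps a single cache alive across a series of
   expansions and releases it once at the end, along these lines
   (an illustrative sketch only):

     aff_tree a1, a2;
     hash_map<tree, name_expansion *> *cache = NULL;

     tree_to_aff_combination_expand (expr1, type, &a1, &cache);
     tree_to_aff_combination_expand (expr2, type, &a2, &cache);
     ...
     free_affine_expand_cache (&cache);  */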
/* Frees memory occupied by struct name_expansion in *VALUE.  Callback for
   hash_map::traverse.  */

bool
free_name_expansion (tree const &, name_expansion **value, void *)
{
  free (*value);
  return true;
}
/* Frees memory allocated for the CACHE used by
   tree_to_aff_combination_expand.  */

void
free_affine_expand_cache (hash_map<tree, name_expansion *> **cache)
{
  if (!*cache)
    return;

  (*cache)->traverse<void *, free_name_expansion> (NULL);
  delete (*cache);
  *cache = NULL;
}
/* If VAL != CST * DIV for any constant CST, returns false.
   Otherwise, if *MULT_SET is true, additionally compares CST and MULT,
   and if they are different, returns false.  Finally, if neither of these
   two cases occur, true is returned, and CST is stored to MULT and MULT_SET
   is set to true.  */

static bool
wide_int_constant_multiple_p (const widest_int &val, const widest_int &div,
			      bool *mult_set, widest_int *mult)
{
  widest_int rem, cst;

  if (val == 0)
    {
      if (*mult_set && *mult != 0)
	return false;
      *mult_set = true;
      *mult = 0;
      return true;
    }

  if (div == 0)
    return false;

  if (!wi::multiple_of_p (val, div, SIGNED, &cst))
    return false;

  if (*mult_set && *mult != cst)
    return false;

  *mult_set = true;
  *mult = cst;
  return true;
}
/* Returns true if VAL = X * DIV for some constant X.  If this is the case,
   X is stored to MULT.  */

bool
aff_combination_constant_multiple_p (aff_tree *val, aff_tree *div,
				     widest_int *mult)
{
  bool mult_set = false;
  unsigned i;

  if (val->n == 0 && val->offset == 0)
    {
      *mult = 0;
      return true;
    }
  if (val->n != div->n)
    return false;

  if (val->rest || div->rest)
    return false;

  if (!wide_int_constant_multiple_p (val->offset, div->offset,
				     &mult_set, mult))
    return false;

  for (i = 0; i < div->n; i++)
    {
      struct aff_comb_elt *elt
	= aff_combination_find_elt (val, div->elts[i].val, NULL);
      if (!elt)
	return false;
      if (!wide_int_constant_multiple_p (elt->coef, div->elts[i].coef,
					 &mult_set, mult))
	return false;
    }

  gcc_assert (mult_set);
  return true;
}
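
/* For instance, VAL = 8 + 4*x is a constant multiple of DIV = 2 + x with
   MULT = 4, whereas 8 + 4*x and 2 + y are not in that relation: the element
   values must match exactly, and the same multiplier must work for the
   offset and every coefficient.  */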
/* Prints the affine VAL to the FILE.  */

static void
print_aff (FILE *file, aff_tree *val)
{
  unsigned i;
  signop sgn = TYPE_SIGN (val->type);
  if (POINTER_TYPE_P (val->type))
    sgn = SIGNED;
  fprintf (file, "{\n  type = ");
  print_generic_expr (file, val->type, TDF_VOPS|TDF_MEMSYMS);
  fprintf (file, "\n  offset = ");
  print_dec (val->offset, file, sgn);
  if (val->n > 0)
    {
      fprintf (file, "\n  elements = {\n");
      for (i = 0; i < val->n; i++)
	{
	  fprintf (file, "    [%d] = ", i);
	  print_generic_expr (file, val->elts[i].val, TDF_VOPS|TDF_MEMSYMS);

	  fprintf (file, " * ");
	  print_dec (val->elts[i].coef, file, sgn);
	  if (i != val->n - 1)
	    fprintf (file, ", \n");
	}
      fprintf (file, "\n  }");
    }
  if (val->rest)
    {
      fprintf (file, "\n  rest = ");
      print_generic_expr (file, val->rest, TDF_VOPS|TDF_MEMSYMS);
    }
  fprintf (file, "\n}");
}
/* Prints the affine VAL to the standard error, used for debugging.  */

DEBUG_FUNCTION void
debug_aff (aff_tree *val)
{
  print_aff (stderr, val);
  fprintf (stderr, "\n");
}
/* Computes address of the reference REF in ADDR.  The size of the accessed
   location is stored to SIZE.  Returns the ultimate containing object to
   which REF refers.  */

tree
get_inner_reference_aff (tree ref, aff_tree *addr, widest_int *size)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree toff;
  machine_mode mode;
  int uns, rev, vol;
  aff_tree tmp;
  tree base = get_inner_reference (ref, &bitsize, &bitpos, &toff, &mode,
				   &uns, &rev, &vol, false);
  tree base_addr = build_fold_addr_expr (base);

  /* ADDR = &BASE + TOFF + BITPOS / BITS_PER_UNIT.  */

  tree_to_aff_combination (base_addr, sizetype, addr);

  if (toff)
    {
      tree_to_aff_combination (toff, sizetype, &tmp);
      aff_combination_add (addr, &tmp);
    }

  aff_combination_const (&tmp, sizetype, bitpos / BITS_PER_UNIT);
  aff_combination_add (addr, &tmp);

  *size = (bitsize + BITS_PER_UNIT - 1) / BITS_PER_UNIT;

  return base;
}
/* Returns true if a region of size SIZE1 at position 0 and a region of
   size SIZE2 at position DIFF cannot overlap.  */

bool
aff_comb_cannot_overlap_p (aff_tree *diff, const widest_int &size1,
			   const widest_int &size2)
{
  /* Unless the difference is a constant, we fail.  */
  if (diff->n != 0)
    return false;

  if (wi::neg_p (diff->offset))
    {
      /* The second object is before the first one, we succeed if the last
	 element of the second object is before the start of the first one.  */
      return wi::neg_p (diff->offset + size2 - 1);
    }
  else
    {
      /* We succeed if the second object starts after the first one ends.  */
      return wi::les_p (size1, diff->offset);
    }
}
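
/* For instance, with DIFF = 8, SIZE1 = 8 and SIZE2 = 4, the regions [0, 8)
   and [8, 12) cannot overlap.  With DIFF = -4 and SIZE2 = 4, the second
   region [-4, 0) ends exactly where the first one begins, so the test
   succeeds as well.  */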