/* Operations with affine combinations of trees.
   Copyright (C) 2005, 2007 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "output.h"
#include "diagnostic.h"
#include "tree-dump.h"
#include "pointer-set.h"
#include "tree-affine.h"
#include "tree-gimple.h"
#include "flags.h"
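/* An affine combination (aff_tree, declared in tree-affine.h) represents
   an expression of the form

     OFFSET + ELTS[0].coef * ELTS[0].val + ... + ELTS[N-1].coef * ELTS[N-1].val
	    + REST,

   where the offset and the coefficients are double_ints interpreted in the
   precision of TYPE (and kept sign-extended), and REST collects terms that
   do not fit into the fixed-size ELTS array of at most MAX_AFF_ELTS
   entries.  For instance, the address computation &a + 4 * i + 8 would be
   represented with offset 8 and the two elements 1 * &a and 4 * i.  */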
/* Extends CST as appropriate for the affine combination COMB.  */

double_int
double_int_ext_for_comb (double_int cst, aff_tree *comb)
{
  return double_int_sext (cst, TYPE_PRECISION (comb->type));
}
/* Initializes affine combination COMB so that its value is zero in TYPE.  */

static void
aff_combination_zero (aff_tree *comb, tree type)
{
  comb->type = type;
  comb->offset = double_int_zero;
  comb->n = 0;
  comb->rest = NULL_TREE;
}
/* Sets COMB to CST.  */

void
aff_combination_const (aff_tree *comb, tree type, double_int cst)
{
  aff_combination_zero (comb, type);
  comb->offset = double_int_ext_for_comb (cst, comb);
}
/* Sets COMB to the single element ELT.  */

void
aff_combination_elt (aff_tree *comb, tree type, tree elt)
{
  aff_combination_zero (comb, type);

  comb->n = 1;
  comb->elts[0].val = elt;
  comb->elts[0].coef = double_int_one;
}
/* Scales COMB by SCALE.  */

void
aff_combination_scale (aff_tree *comb, double_int scale)
{
  unsigned i, j;

  scale = double_int_ext_for_comb (scale, comb);
  if (double_int_one_p (scale))
    return;

  if (double_int_zero_p (scale))
    {
      aff_combination_zero (comb, comb->type);
      return;
    }

  comb->offset
    = double_int_ext_for_comb (double_int_mul (scale, comb->offset), comb);
  for (i = 0, j = 0; i < comb->n; i++)
    {
      double_int new_coef;

      new_coef
	= double_int_ext_for_comb (double_int_mul (scale, comb->elts[i].coef),
				   comb);
      /* A coefficient may become zero due to overflow.  Remove the zero
	 elements.  */
      if (double_int_zero_p (new_coef))
	continue;
      comb->elts[j].coef = new_coef;
      comb->elts[j].val = comb->elts[i].val;
      j++;
    }
  comb->n = j;

  if (comb->rest)
    {
      tree type = comb->type;
      if (POINTER_TYPE_P (type))
	type = sizetype;
      if (comb->n < MAX_AFF_ELTS)
	{
	  comb->elts[comb->n].coef = scale;
	  comb->elts[comb->n].val = comb->rest;
	  comb->rest = NULL_TREE;
	  comb->n++;
	}
      else
	comb->rest = fold_build2 (MULT_EXPR, type, comb->rest,
				  double_int_to_tree (type, scale));
    }
}
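/* For instance, scaling the combination 3 + 2 * x by -1 yields -3 + -2 * x,
   while scaling by 0 resets the combination to zero; a coefficient that
   wraps around to zero in the precision of the type drops its element
   entirely.  */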
/* Adds ELT * SCALE to COMB.  */

void
aff_combination_add_elt (aff_tree *comb, tree elt, double_int scale)
{
  unsigned i;
  tree type;

  scale = double_int_ext_for_comb (scale, comb);
  if (double_int_zero_p (scale))
    return;

  for (i = 0; i < comb->n; i++)
    if (operand_equal_p (comb->elts[i].val, elt, 0))
      {
	double_int new_coef;

	new_coef = double_int_add (comb->elts[i].coef, scale);
	new_coef = double_int_ext_for_comb (new_coef, comb);
	if (!double_int_zero_p (new_coef))
	  {
	    comb->elts[i].coef = new_coef;
	    return;
	  }

	comb->n--;
	comb->elts[i] = comb->elts[comb->n];

	if (comb->rest)
	  {
	    gcc_assert (comb->n == MAX_AFF_ELTS - 1);
	    comb->elts[comb->n].coef = double_int_one;
	    comb->elts[comb->n].val = comb->rest;
	    comb->rest = NULL_TREE;
	    comb->n++;
	  }
	return;
      }
  if (comb->n < MAX_AFF_ELTS)
    {
      comb->elts[comb->n].coef = scale;
      comb->elts[comb->n].val = elt;
      comb->n++;
      return;
    }

  type = comb->type;
  if (POINTER_TYPE_P (type))
    type = sizetype;

  if (double_int_one_p (scale))
    elt = fold_convert (type, elt);
  else
    elt = fold_build2 (MULT_EXPR, type,
		       fold_convert (type, elt),
		       double_int_to_tree (type, scale));

  if (comb->rest)
    comb->rest = fold_build2 (PLUS_EXPR, type, comb->rest, elt);
  else
    comb->rest = elt;
}
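/* For instance, adding x * -2 to the combination 5 + 2 * x cancels the
   existing element and leaves just the offset 5; a genuinely new element is
   appended if a slot is free, and folded into REST otherwise.  */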
/* Adds CST to C.  */

static void
aff_combination_add_cst (aff_tree *c, double_int cst)
{
  c->offset = double_int_ext_for_comb (double_int_add (c->offset, cst), c);
}
/* Adds COMB2 to COMB1.  */

void
aff_combination_add (aff_tree *comb1, aff_tree *comb2)
{
  unsigned i;

  aff_combination_add_cst (comb1, comb2->offset);
  for (i = 0; i < comb2->n; i++)
    aff_combination_add_elt (comb1, comb2->elts[i].val, comb2->elts[i].coef);
  if (comb2->rest)
    aff_combination_add_elt (comb1, comb2->rest, double_int_one);
}
/* Converts affine combination COMB to TYPE.  */

void
aff_combination_convert (aff_tree *comb, tree type)
{
  unsigned i, j;
  tree comb_type = comb->type;

  if (TYPE_PRECISION (type) > TYPE_PRECISION (comb_type))
    {
      tree val = fold_convert (type, aff_combination_to_tree (comb));
      tree_to_aff_combination (val, type, comb);
      return;
    }

  comb->type = type;
  if (comb->rest && !POINTER_TYPE_P (type))
    comb->rest = fold_convert (type, comb->rest);

  if (TYPE_PRECISION (type) == TYPE_PRECISION (comb_type))
    return;

  comb->offset = double_int_ext_for_comb (comb->offset, comb);
  for (i = j = 0; i < comb->n; i++)
    {
      double_int new_coef = double_int_ext_for_comb (comb->elts[i].coef, comb);
      if (double_int_zero_p (new_coef))
	continue;
      comb->elts[j].coef = new_coef;
      comb->elts[j].val = fold_convert (type, comb->elts[i].val);
      j++;
    }

  comb->n = j;
  if (comb->n < MAX_AFF_ELTS && comb->rest)
    {
      comb->elts[comb->n].coef = double_int_one;
      comb->elts[comb->n].val = comb->rest;
      comb->rest = NULL_TREE;
      comb->n++;
    }
}
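/* Note that converting to a narrower (or equally wide) type just re-extends
   the offset and the coefficients in the new precision, whereas widening
   rebuilds a tree and decomposes it afresh, since extending the
   coefficients alone would not be guaranteed to preserve the value.  */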
/* Splits EXPR into an affine combination of parts.  */

void
tree_to_aff_combination (tree expr, tree type, aff_tree *comb)
{
  aff_tree tmp;
  enum tree_code code;
  tree cst, core, toffset;
  HOST_WIDE_INT bitpos, bitsize;
  enum machine_mode mode;
  int unsignedp, volatilep;

  STRIP_NOPS (expr);

  code = TREE_CODE (expr);
  switch (code)
    {
    case INTEGER_CST:
      aff_combination_const (comb, type, tree_to_double_int (expr));
      return;

    case POINTER_PLUS_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
      aff_combination_add (comb, &tmp);
      return;

    case PLUS_EXPR:
    case MINUS_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), type, &tmp);
      if (code == MINUS_EXPR)
	aff_combination_scale (&tmp, double_int_minus_one);
      aff_combination_add (comb, &tmp);
      return;

    case MULT_EXPR:
      cst = TREE_OPERAND (expr, 1);
      if (TREE_CODE (cst) != INTEGER_CST)
	break;
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, tree_to_double_int (cst));
      return;

    case NEGATE_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, double_int_minus_one);
      return;

    case BIT_NOT_EXPR:
      /* ~x = -x - 1  */
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, double_int_minus_one);
      aff_combination_add_cst (comb, double_int_minus_one);
      return;

    case ADDR_EXPR:
      core = get_inner_reference (TREE_OPERAND (expr, 0), &bitsize, &bitpos,
				  &toffset, &mode, &unsignedp, &volatilep,
				  false);
      if (bitpos % BITS_PER_UNIT != 0)
	break;
      aff_combination_const (comb, type,
			     uhwi_to_double_int (bitpos / BITS_PER_UNIT));
      core = build_fold_addr_expr (core);
      if (TREE_CODE (core) == ADDR_EXPR)
	aff_combination_add_elt (comb, core, double_int_one);
      else
	{
	  tree_to_aff_combination (core, type, &tmp);
	  aff_combination_add (comb, &tmp);
	}
      if (toffset)
	{
	  tree_to_aff_combination (toffset, type, &tmp);
	  aff_combination_add (comb, &tmp);
	}
      return;

    default:
      break;
    }

  aff_combination_elt (comb, type, expr);
}
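/* For instance, (x + 3) * 4 - x decomposes into the combination
   12 + 3 * x: the MULT_EXPR case scales the inner combination 3 + x by 4,
   and the MINUS_EXPR case adds the combination of x scaled by -1, which
   merges with the existing element 4 * x.  */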
/* Creates EXPR + ELT * SCALE in TYPE.  EXPR is taken from affine
   combination COMB.  */

static tree
add_elt_to_tree (tree expr, tree type, tree elt, double_int scale,
		 aff_tree *comb)
{
  enum tree_code code;
  tree type1 = type;
  if (POINTER_TYPE_P (type))
    type1 = sizetype;

  scale = double_int_ext_for_comb (scale, comb);
  elt = fold_convert (type1, elt);

  if (double_int_one_p (scale))
    {
      if (!expr)
	return fold_convert (type, elt);

      if (POINTER_TYPE_P (type))
	return fold_build2 (POINTER_PLUS_EXPR, type, expr, elt);
      return fold_build2 (PLUS_EXPR, type, expr, elt);
    }

  if (double_int_minus_one_p (scale))
    {
      if (!expr)
	return fold_convert (type, fold_build1 (NEGATE_EXPR, type1, elt));

      if (POINTER_TYPE_P (type))
	{
	  elt = fold_build1 (NEGATE_EXPR, type1, elt);
	  return fold_build2 (POINTER_PLUS_EXPR, type, expr, elt);
	}
      return fold_build2 (MINUS_EXPR, type, expr, elt);
    }

  if (!expr)
    return fold_convert (type,
			 fold_build2 (MULT_EXPR, type1, elt,
				      double_int_to_tree (type1, scale)));

  if (double_int_negative_p (scale))
    {
      code = MINUS_EXPR;
      scale = double_int_neg (scale);
    }
  else
    code = PLUS_EXPR;

  elt = fold_build2 (MULT_EXPR, type1, elt,
		     double_int_to_tree (type1, scale));
  if (POINTER_TYPE_P (type))
    {
      if (code == MINUS_EXPR)
	elt = fold_build1 (NEGATE_EXPR, type1, elt);
      return fold_build2 (POINTER_PLUS_EXPR, type, expr, elt);
    }
  return fold_build2 (code, type, expr, elt);
}
/* Makes a tree from the affine combination COMB.  */

tree
aff_combination_to_tree (aff_tree *comb)
{
  tree type = comb->type;
  tree expr = comb->rest;
  unsigned i;
  double_int off, sgn;
  tree type1 = type;
  if (POINTER_TYPE_P (type))
    type1 = sizetype;

  gcc_assert (comb->n == MAX_AFF_ELTS || comb->rest == NULL_TREE);

  for (i = 0; i < comb->n; i++)
    expr = add_elt_to_tree (expr, type, comb->elts[i].val, comb->elts[i].coef,
			    comb);

  /* Ensure that we get x - 1, not x + (-1) or x + 0xff..f if x is
     unsigned.  */
  if (double_int_negative_p (comb->offset))
    {
      off = double_int_neg (comb->offset);
      sgn = double_int_minus_one;
    }
  else
    {
      off = comb->offset;
      sgn = double_int_one;
    }
  return add_elt_to_tree (expr, type, double_int_to_tree (type1, off), sgn,
			  comb);
}
/* Copies the tree elements of COMB to ensure that they are not shared.  */

void
unshare_aff_combination (aff_tree *comb)
{
  unsigned i;

  for (i = 0; i < comb->n; i++)
    comb->elts[i].val = unshare_expr (comb->elts[i].val);
  if (comb->rest)
    comb->rest = unshare_expr (comb->rest);
}
/* Removes the M-th element from COMB.  */

void
aff_combination_remove_elt (aff_tree *comb, unsigned m)
{
  comb->n--;
  if (m <= comb->n)
    comb->elts[m] = comb->elts[comb->n];
  if (comb->rest)
    {
      comb->elts[comb->n].coef = double_int_one;
      comb->elts[comb->n].val = comb->rest;
      comb->rest = NULL_TREE;
      comb->n++;
    }
}
/* Adds C * COEF * VAL to R.  VAL may be NULL, in which case only
   C * COEF is added to R.  */

static void
aff_combination_add_product (aff_tree *c, double_int coef, tree val,
			     aff_tree *r)
{
  unsigned i;
  tree aval, type;

  for (i = 0; i < c->n; i++)
    {
      aval = c->elts[i].val;
      if (val)
	{
	  type = TREE_TYPE (aval);
	  aval = fold_build2 (MULT_EXPR, type, aval,
			      fold_convert (type, val));
	}

      aff_combination_add_elt (r, aval,
			       double_int_mul (coef, c->elts[i].coef));
    }

  if (c->rest)
    {
      aval = c->rest;
      if (val)
	{
	  type = TREE_TYPE (aval);
	  aval = fold_build2 (MULT_EXPR, type, aval,
			      fold_convert (type, val));
	}

      aff_combination_add_elt (r, aval, coef);
    }

  if (val)
    aff_combination_add_elt (r, val,
			     double_int_mul (coef, c->offset));
  else
    aff_combination_add_cst (r, double_int_mul (coef, c->offset));
}
/* Multiplies C1 by C2, storing the result to R.  */

void
aff_combination_mult (aff_tree *c1, aff_tree *c2, aff_tree *r)
{
  unsigned i;
  gcc_assert (TYPE_PRECISION (c1->type) == TYPE_PRECISION (c2->type));

  aff_combination_zero (r, c1->type);

  for (i = 0; i < c2->n; i++)
    aff_combination_add_product (c1, c2->elts[i].coef, c2->elts[i].val, r);
  if (c2->rest)
    aff_combination_add_product (c1, double_int_one, c2->rest, r);
  aff_combination_add_product (c1, c2->offset, NULL, r);
}
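/* For instance, multiplying the combinations 2 + x and 3 + y produces
   6 + 3 * x + 2 * y + 1 * (x * y); the product of the two variable parts is
   no longer affine, so it is recorded as a new element (or spills into
   REST).  */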
/* Returns the element of COMB whose value is VAL, or NULL if no such
   element exists.  If IDX is not NULL, it is set to the index of VAL in
   COMB.  */

static struct aff_comb_elt *
aff_combination_find_elt (aff_tree *comb, tree val, unsigned *idx)
{
  unsigned i;

  for (i = 0; i < comb->n; i++)
    if (operand_equal_p (comb->elts[i].val, val, 0))
      {
	if (idx)
	  *idx = i;

	return &comb->elts[i];
      }

  return NULL;
}
/* Element of the cache that maps an ssa name to its expanded form
   as an affine expression EXPANSION.  */

struct name_expansion
{
  aff_tree expansion;

  /* True if the expansion for the name is just being generated.  */
  unsigned in_progress : 1;
};
/* Expands SSA names in COMB recursively.  CACHE is used to cache the
   results.  */

void
aff_combination_expand (aff_tree *comb, struct pointer_map_t **cache)
{
  unsigned i;
  aff_tree to_add, current, curre;
  tree e, def, rhs;
  double_int scale;
  void **slot;
  struct name_expansion *exp;

  aff_combination_zero (&to_add, comb->type);
  for (i = 0; i < comb->n; i++)
    {
      tree type, name;
      e = comb->elts[i].val;
      type = TREE_TYPE (e);
      name = e;
      /* Look through some conversions.  */
      if (TREE_CODE (e) == NOP_EXPR
	  && (TYPE_PRECISION (type)
	      >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (e, 0)))))
	name = TREE_OPERAND (e, 0);
      if (TREE_CODE (name) != SSA_NAME)
	continue;
      def = SSA_NAME_DEF_STMT (name);
      if (TREE_CODE (def) != GIMPLE_MODIFY_STMT
	  || GIMPLE_STMT_OPERAND (def, 0) != name)
	continue;

      rhs = GIMPLE_STMT_OPERAND (def, 1);
      if (TREE_CODE (rhs) != SSA_NAME
	  && !EXPR_P (rhs)
	  && !is_gimple_min_invariant (rhs))
	continue;

      /* We do not know whether the reference retains its value at the
	 place where the expansion is used.  */
      if (REFERENCE_CLASS_P (rhs))
	continue;

      if (!*cache)
	*cache = pointer_map_create ();
      slot = pointer_map_insert (*cache, e);
      exp = (struct name_expansion *) *slot;

      if (!exp)
	{
	  exp = XNEW (struct name_expansion);
	  exp->in_progress = 1;
	  *slot = exp;
	  if (e != name)
	    {
	      /* In principle this is a generally valid folding, but
		 it is not unconditionally an optimization, so do it
		 here and not in fold_unary.  */
	      /* Convert (T1)(X *+- CST) into (T1)X *+- (T1)CST if T1 is wider
		 than the type of X and overflow for the type of X is
		 undefined.  */
	      if (INTEGRAL_TYPE_P (type)
		  && INTEGRAL_TYPE_P (TREE_TYPE (rhs))
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (rhs))
		  && TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (rhs))
		  && (TREE_CODE (rhs) == PLUS_EXPR
		      || TREE_CODE (rhs) == MINUS_EXPR
		      || TREE_CODE (rhs) == MULT_EXPR)
		  && TREE_CODE (TREE_OPERAND (rhs, 1)) == INTEGER_CST)
		rhs = fold_build2 (TREE_CODE (rhs), type,
				   fold_convert (type, TREE_OPERAND (rhs, 0)),
				   fold_convert (type, TREE_OPERAND (rhs, 1)));
	      else
		rhs = fold_convert (type, rhs);
	    }
	  tree_to_aff_combination_expand (rhs, comb->type, &current, cache);
	  exp->expansion = current;
	  exp->in_progress = 0;
	}
      else
	{
	  /* Since we follow the definitions in the SSA form, we should not
	     enter a cycle unless we pass through a phi node.  */
	  gcc_assert (!exp->in_progress);
	  current = exp->expansion;
	}

      /* Accumulate the new terms to TO_ADD, so that we do not modify
	 COMB while traversing it; include the term -coef * E, to remove
	 it from COMB.  */
      scale = comb->elts[i].coef;
      aff_combination_zero (&curre, comb->type);
      aff_combination_add_elt (&curre, e, double_int_neg (scale));
      aff_combination_scale (&current, scale);
      aff_combination_add (&to_add, &current);
      aff_combination_add (&to_add, &curre);
    }
  aff_combination_add (comb, &to_add);
}
/* Similar to tree_to_aff_combination, but follows SSA name definitions
   and expands them recursively.  CACHE is used to cache the expansions
   of the ssa names, to avoid exponential time complexity for cases
   like

   a1 = a0 + a0;
   a2 = a1 + a1;
   a3 = a2 + a2;
   ...  */

void
tree_to_aff_combination_expand (tree expr, tree type, aff_tree *comb,
				struct pointer_map_t **cache)
{
  tree_to_aff_combination (expr, type, comb);
  aff_combination_expand (comb, cache);
}
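/* A typical call sequence (sketched; the cache starts out NULL and must
   eventually be released with free_affine_expand_cache):

     struct pointer_map_t *cache = NULL;
     aff_tree comb;

     tree_to_aff_combination_expand (expr, type, &comb, &cache);
     ...
     free_affine_expand_cache (&cache);

   For the chain above, expanding a3 this way yields the single term 8 * a0,
   with each name decomposed only once.  */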
/* Frees memory occupied by struct name_expansion in *VALUE.  Callback for
   pointer_map_traverse.  */

static bool
free_name_expansion (const void *key ATTRIBUTE_UNUSED, void **value,
		     void *data ATTRIBUTE_UNUSED)
{
  struct name_expansion *const exp = (struct name_expansion *) *value;

  free (exp);
  return true;
}
/* Frees memory allocated for the CACHE used by
   tree_to_aff_combination_expand.  */

void
free_affine_expand_cache (struct pointer_map_t **cache)
{
  if (!*cache)
    return;

  pointer_map_traverse (*cache, free_name_expansion, NULL);
  pointer_map_destroy (*cache);
  *cache = NULL;
}
/* Returns false if VAL is not a constant multiple of DIV.  Otherwise, if
   VAL is nonzero and *MULT_SET is true, additionally checks that the
   multiplier equals *MULT, and returns false if it does not.  On success
   returns true, and if VAL is nonzero, stores the multiplier to *MULT and
   sets *MULT_SET to true.  */

static bool
double_int_constant_multiple_p (double_int val, double_int div,
				bool *mult_set, double_int *mult)
{
  double_int rem, cst;

  if (double_int_zero_p (val))
    return true;

  if (double_int_zero_p (div))
    return false;

  cst = double_int_sdivmod (val, div, FLOOR_DIV_EXPR, &rem);
  if (!double_int_zero_p (rem))
    return false;

  if (*mult_set && !double_int_equal_p (*mult, cst))
    return false;

  *mult_set = true;
  *mult = cst;
  return true;
}
/* Returns true if VAL = X * DIV for some constant X.  If this is the case,
   X is stored to MULT.  */

bool
aff_combination_constant_multiple_p (aff_tree *val, aff_tree *div,
				     double_int *mult)
{
  bool mult_set = false;
  unsigned i;

  if (val->n == 0 && double_int_zero_p (val->offset))
    {
      *mult = double_int_zero;
      return true;
    }

  if (val->n != div->n)
    return false;

  if (val->rest || div->rest)
    return false;

  if (!double_int_constant_multiple_p (val->offset, div->offset,
				       &mult_set, mult))
    return false;

  for (i = 0; i < div->n; i++)
    {
      struct aff_comb_elt *elt
	= aff_combination_find_elt (val, div->elts[i].val, NULL);
      if (!elt)
	return false;
      if (!double_int_constant_multiple_p (elt->coef, div->elts[i].coef,
					   &mult_set, mult))
	return false;
    }

  gcc_assert (mult_set);
  return true;
}
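/* For instance, with VAL = 8 + 12 * x and DIV = 2 + 3 * x this returns true
   and stores 4 to *MULT; it fails as soon as the offsets or any pair of
   coefficients disagree on the multiplier.  */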
/* Prints the affine VAL to the FILE.  */

void
print_aff (FILE *file, aff_tree *val)
{
  unsigned i;
  bool uns = TYPE_UNSIGNED (val->type);
  if (POINTER_TYPE_P (val->type))
    uns = false;
  fprintf (file, "{\n  type = ");
  print_generic_expr (file, val->type, TDF_VOPS|TDF_MEMSYMS);
  fprintf (file, "\n  offset = ");
  dump_double_int (file, val->offset, uns);
  if (val->n > 0)
    {
      fprintf (file, "\n  elements = {\n");
      for (i = 0; i < val->n; i++)
	{
	  fprintf (file, "    [%u] = ", i);
	  print_generic_expr (file, val->elts[i].val, TDF_VOPS|TDF_MEMSYMS);

	  fprintf (file, " * ");
	  dump_double_int (file, val->elts[i].coef, uns);
	  if (i != val->n - 1)
	    fprintf (file, ", \n");
	}
      fprintf (file, "\n  }");
    }
  if (val->rest)
    {
      fprintf (file, "\n  rest = ");
      print_generic_expr (file, val->rest, TDF_VOPS|TDF_MEMSYMS);
    }
  fprintf (file, "\n}");
}
/* Prints the affine VAL to the standard error, used for debugging.  */

void
debug_aff (aff_tree *val)
{
  print_aff (stderr, val);
  fprintf (stderr, "\n");
}
/* Computes the address of the reference REF and stores it to ADDR.  The
   size of the accessed location is stored to SIZE.  */

void
get_inner_reference_aff (tree ref, aff_tree *addr, double_int *size)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree toff;
  enum machine_mode mode;
  int uns, vol;
  aff_tree tmp;
  tree base = get_inner_reference (ref, &bitsize, &bitpos, &toff, &mode,
				   &uns, &vol, false);
  tree base_addr = build_fold_addr_expr (base);

  /* ADDR = &BASE + TOFF + BITPOS / BITS_PER_UNIT.  */

  tree_to_aff_combination (base_addr, sizetype, addr);

  if (toff)
    {
      tree_to_aff_combination (toff, sizetype, &tmp);
      aff_combination_add (addr, &tmp);
    }

  aff_combination_const (&tmp, sizetype,
			 shwi_to_double_int (bitpos / BITS_PER_UNIT));
  aff_combination_add (addr, &tmp);

  *size = shwi_to_double_int ((bitsize + BITS_PER_UNIT - 1) / BITS_PER_UNIT);
}
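/* For instance, for a component reference A.F, where F sits at byte offset
   4 within A, ADDR becomes the combination &A + 4 and SIZE the size of F in
   bytes, rounded up from its size in bits.  */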