/* Operations with affine combinations of trees.
   Copyright (C) 2005, 2007, 2008, 2010 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "tree-pretty-print.h"
#include "tree-dump.h"
#include "pointer-set.h"
#include "tree-affine.h"
#include "gimple.h"
#include "flags.h"
/* Extends CST as appropriate for the affine combination COMB.  */

double_int
double_int_ext_for_comb (double_int cst, aff_tree *comb)
{
  return double_int_sext (cst, TYPE_PRECISION (comb->type));
}
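
/* E.g. in a combination whose type has 8-bit precision, the constant
   0xff sign-extends to -1; offsets and coefficients are thus kept in a
   canonical sign-extended form.  */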
/* Initializes affine combination COMB so that its value is zero in TYPE.  */

static void
aff_combination_zero (aff_tree *comb, tree type)
{
  comb->type = type;
  comb->offset = double_int_zero;
  comb->n = 0;
  comb->rest = NULL_TREE;
}
/* Sets COMB to CST.  */

void
aff_combination_const (aff_tree *comb, tree type, double_int cst)
{
  aff_combination_zero (comb, type);
  comb->offset = double_int_ext_for_comb (cst, comb);
}
/* Sets COMB to single element ELT.  */

void
aff_combination_elt (aff_tree *comb, tree type, tree elt)
{
  aff_combination_zero (comb, type);

  comb->n = 1;
  comb->elts[0].val = elt;
  comb->elts[0].coef = double_int_one;
}
/* Scales COMB by SCALE.  */

void
aff_combination_scale (aff_tree *comb, double_int scale)
{
  unsigned i, j;

  scale = double_int_ext_for_comb (scale, comb);
  if (double_int_one_p (scale))
    return;

  if (double_int_zero_p (scale))
    {
      aff_combination_zero (comb, comb->type);
      return;
    }

  comb->offset
    = double_int_ext_for_comb (double_int_mul (scale, comb->offset), comb);
  for (i = 0, j = 0; i < comb->n; i++)
    {
      double_int new_coef;

      new_coef
        = double_int_ext_for_comb (double_int_mul (scale, comb->elts[i].coef),
                                   comb);
      /* A coefficient may become zero due to overflow.  Remove the zero
         elements.  */
      if (double_int_zero_p (new_coef))
        continue;
      comb->elts[j].coef = new_coef;
      comb->elts[j].val = comb->elts[i].val;
      j++;
    }
  comb->n = j;

  if (comb->rest)
    {
      tree type = comb->type;
      if (POINTER_TYPE_P (type))
        type = sizetype;
      if (comb->n < MAX_AFF_ELTS)
        {
          comb->elts[comb->n].coef = scale;
          comb->elts[comb->n].val = comb->rest;
          comb->rest = NULL_TREE;
          comb->n++;
        }
      else
        comb->rest = fold_build2 (MULT_EXPR, type, comb->rest,
                                  double_int_to_tree (type, scale));
    }
}
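
/* For example, scaling the combination 2*x + 3 by 4 gives 8*x + 12.
   A sketch of building that value with the functions in this file
   ("type" and "x" stand for some integral type and expression tree):

     aff_tree comb;
     aff_combination_elt (&comb, type, x);                      -- x
     aff_combination_scale (&comb, shwi_to_double_int (2));     -- 2*x
     aff_combination_add_cst (&comb, shwi_to_double_int (3));   -- 2*x + 3
     aff_combination_scale (&comb, shwi_to_double_int (4));     -- 8*x + 12
*/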
/* Adds ELT * SCALE to COMB.  */

void
aff_combination_add_elt (aff_tree *comb, tree elt, double_int scale)
{
  unsigned i;
  tree type;

  scale = double_int_ext_for_comb (scale, comb);
  if (double_int_zero_p (scale))
    return;

  for (i = 0; i < comb->n; i++)
    if (operand_equal_p (comb->elts[i].val, elt, 0))
      {
        double_int new_coef;

        new_coef = double_int_add (comb->elts[i].coef, scale);
        new_coef = double_int_ext_for_comb (new_coef, comb);
        if (!double_int_zero_p (new_coef))
          {
            comb->elts[i].coef = new_coef;
            return;
          }

        comb->n--;
        comb->elts[i] = comb->elts[comb->n];

        if (comb->rest)
          {
            gcc_assert (comb->n == MAX_AFF_ELTS - 1);
            comb->elts[comb->n].coef = double_int_one;
            comb->elts[comb->n].val = comb->rest;
            comb->rest = NULL_TREE;
            comb->n++;
          }
        return;
      }
  if (comb->n < MAX_AFF_ELTS)
    {
      comb->elts[comb->n].coef = scale;
      comb->elts[comb->n].val = elt;
      comb->n++;
      return;
    }

  type = comb->type;
  if (POINTER_TYPE_P (type))
    type = sizetype;

  if (double_int_one_p (scale))
    elt = fold_convert (type, elt);
  else
    elt = fold_build2 (MULT_EXPR, type,
                       fold_convert (type, elt),
                       double_int_to_tree (type, scale));

  if (comb->rest)
    comb->rest = fold_build2 (PLUS_EXPR, type, comb->rest, elt);
  else
    comb->rest = elt;
}
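
/* Adding 3*x to a combination already containing -3*x cancels the
   element entirely; and once MAX_AFF_ELTS distinct elements exist,
   further terms are folded into the catch-all COMB->rest tree.  */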
/* Adds CST to C.  */

static void
aff_combination_add_cst (aff_tree *c, double_int cst)
{
  c->offset = double_int_ext_for_comb (double_int_add (c->offset, cst), c);
}
/* Adds COMB2 to COMB1.  */

void
aff_combination_add (aff_tree *comb1, aff_tree *comb2)
{
  unsigned i;

  aff_combination_add_cst (comb1, comb2->offset);
  for (i = 0; i < comb2->n; i++)
    aff_combination_add_elt (comb1, comb2->elts[i].val, comb2->elts[i].coef);
  if (comb2->rest)
    aff_combination_add_elt (comb1, comb2->rest, double_int_one);
}
/* Converts affine combination COMB to TYPE.  */

void
aff_combination_convert (aff_tree *comb, tree type)
{
  unsigned i, j;
  tree comb_type = comb->type;

  if (TYPE_PRECISION (type) > TYPE_PRECISION (comb_type))
    {
      tree val = fold_convert (type, aff_combination_to_tree (comb));
      tree_to_aff_combination (val, type, comb);
      return;
    }

  comb->type = type;
  if (comb->rest && !POINTER_TYPE_P (type))
    comb->rest = fold_convert (type, comb->rest);

  if (TYPE_PRECISION (type) == TYPE_PRECISION (comb_type))
    return;

  comb->offset = double_int_ext_for_comb (comb->offset, comb);
  for (i = j = 0; i < comb->n; i++)
    {
      double_int new_coef = double_int_ext_for_comb (comb->elts[i].coef, comb);
      if (double_int_zero_p (new_coef))
        continue;
      comb->elts[j].coef = new_coef;
      comb->elts[j].val = fold_convert (type, comb->elts[i].val);
      j++;
    }

  comb->n = j;
  if (comb->n < MAX_AFF_ELTS && comb->rest)
    {
      comb->elts[comb->n].coef = double_int_one;
      comb->elts[comb->n].val = comb->rest;
      comb->rest = NULL_TREE;
      comb->n++;
    }
}
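
/* Note that widening (TYPE wider than COMB->type) goes through a full
   tree round trip above: the truncated coefficients alone do not carry
   enough information to reconstruct the wider value, so the combination
   is rebuilt from the folded tree.  Narrowing merely re-extends the
   existing coefficients in place.  */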
/* Splits EXPR into an affine combination of parts.  */

void
tree_to_aff_combination (tree expr, tree type, aff_tree *comb)
{
  aff_tree tmp;
  enum tree_code code;
  tree cst, core, toffset;
  HOST_WIDE_INT bitpos, bitsize;
  enum machine_mode mode;
  int unsignedp, volatilep;

  STRIP_NOPS (expr);

  code = TREE_CODE (expr);
  switch (code)
    {
    case INTEGER_CST:
      aff_combination_const (comb, type, tree_to_double_int (expr));
      return;

    case POINTER_PLUS_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
      aff_combination_add (comb, &tmp);
      return;

    case PLUS_EXPR:
    case MINUS_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), type, &tmp);
      if (code == MINUS_EXPR)
        aff_combination_scale (&tmp, double_int_minus_one);
      aff_combination_add (comb, &tmp);
      return;

    case MULT_EXPR:
      cst = TREE_OPERAND (expr, 1);
      if (TREE_CODE (cst) != INTEGER_CST)
        break;
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, tree_to_double_int (cst));
      return;

    case NEGATE_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, double_int_minus_one);
      return;

    case BIT_NOT_EXPR:
      /* ~x = -x - 1  */
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, double_int_minus_one);
      aff_combination_add_cst (comb, double_int_minus_one);
      return;

    case ADDR_EXPR:
      /* Handle &MEM[ptr + CST] which is equivalent to POINTER_PLUS_EXPR.  */
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == MEM_REF)
        {
          expr = TREE_OPERAND (expr, 0);
          tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
          tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
          aff_combination_add (comb, &tmp);
          return;
        }
      core = get_inner_reference (TREE_OPERAND (expr, 0), &bitsize, &bitpos,
                                  &toffset, &mode, &unsignedp, &volatilep,
                                  false);
      if (bitpos % BITS_PER_UNIT != 0)
        break;
      aff_combination_const (comb, type,
                             uhwi_to_double_int (bitpos / BITS_PER_UNIT));
      core = build_fold_addr_expr (core);
      if (TREE_CODE (core) == ADDR_EXPR)
        aff_combination_add_elt (comb, core, double_int_one);
      else
        {
          tree_to_aff_combination (core, type, &tmp);
          aff_combination_add (comb, &tmp);
        }
      if (toffset)
        {
          tree_to_aff_combination (toffset, type, &tmp);
          aff_combination_add (comb, &tmp);
        }
      return;

    case MEM_REF:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR)
        tree_to_aff_combination (TREE_OPERAND (TREE_OPERAND (expr, 0), 0),
                                 type, comb);
      else if (integer_zerop (TREE_OPERAND (expr, 1)))
        {
          aff_combination_elt (comb, type, expr);
          return;
        }
      else
        aff_combination_elt (comb, type,
                             build2 (MEM_REF, TREE_TYPE (expr),
                                     TREE_OPERAND (expr, 0),
                                     build_int_cst
                                       (TREE_TYPE (TREE_OPERAND (expr, 1)), 0)));
      tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
      aff_combination_add (comb, &tmp);
      return;

    default:
      break;
    }

  aff_combination_elt (comb, type, expr);
}
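
/* For instance, EXPR = a + 4 * (b - 1) in a 32-bit TYPE decomposes into
   the combination 1*a + 4*b + (-4): the PLUS_EXPR and MULT_EXPR cases
   recurse into the operands and the partial results are merged with
   aff_combination_add and aff_combination_scale.  */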
/* Creates EXPR + ELT * SCALE in TYPE.  EXPR is taken from affine
   combination COMB.  */

static tree
add_elt_to_tree (tree expr, tree type, tree elt, double_int scale,
                 aff_tree *comb)
{
  enum tree_code code;
  tree type1 = type;
  if (POINTER_TYPE_P (type))
    type1 = sizetype;

  scale = double_int_ext_for_comb (scale, comb);
  elt = fold_convert (type1, elt);

  if (double_int_one_p (scale))
    {
      if (!expr)
        return fold_convert (type, elt);

      if (POINTER_TYPE_P (type))
        return fold_build2 (POINTER_PLUS_EXPR, type, expr, elt);
      return fold_build2 (PLUS_EXPR, type, expr, elt);
    }

  if (double_int_minus_one_p (scale))
    {
      if (!expr)
        return fold_convert (type, fold_build1 (NEGATE_EXPR, type1, elt));

      if (POINTER_TYPE_P (type))
        {
          elt = fold_build1 (NEGATE_EXPR, type1, elt);
          return fold_build2 (POINTER_PLUS_EXPR, type, expr, elt);
        }
      return fold_build2 (MINUS_EXPR, type, expr, elt);
    }

  if (!expr)
    return fold_convert (type,
                         fold_build2 (MULT_EXPR, type1, elt,
                                      double_int_to_tree (type1, scale)));

  if (double_int_negative_p (scale))
    {
      code = MINUS_EXPR;
      scale = double_int_neg (scale);
    }
  else
    code = PLUS_EXPR;

  elt = fold_build2 (MULT_EXPR, type1, elt,
                     double_int_to_tree (type1, scale));
  if (POINTER_TYPE_P (type))
    {
      if (code == MINUS_EXPR)
        elt = fold_build1 (NEGATE_EXPR, type1, elt);
      return fold_build2 (POINTER_PLUS_EXPR, type, expr, elt);
    }
  return fold_build2 (code, type, expr, elt);
}
/* Makes tree from the affine combination COMB.  */

tree
aff_combination_to_tree (aff_tree *comb)
{
  tree type = comb->type;
  tree expr = comb->rest;
  unsigned i;
  double_int off, sgn;
  tree type1 = type;
  if (POINTER_TYPE_P (type))
    type1 = sizetype;

  gcc_assert (comb->n == MAX_AFF_ELTS || comb->rest == NULL_TREE);

  for (i = 0; i < comb->n; i++)
    expr = add_elt_to_tree (expr, type, comb->elts[i].val, comb->elts[i].coef,
                            comb);

  /* Ensure that we get x - 1, not x + (-1) or x + 0xff..f if x is
     unsigned.  */
  if (double_int_negative_p (comb->offset))
    {
      off = double_int_neg (comb->offset);
      sgn = double_int_minus_one;
    }
  else
    {
      off = comb->offset;
      sgn = double_int_one;
    }
  return add_elt_to_tree (expr, type, double_int_to_tree (type1, off), sgn,
                          comb);
}
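
/* Round-tripping through tree_to_aff_combination and back through
   aff_combination_to_tree is one way to re-associate and fold an affine
   expression, as aff_combination_convert does above for widening
   conversions:

     aff_tree comb;
     tree_to_aff_combination (expr, type, &comb);
     expr = aff_combination_to_tree (&comb);
*/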
/* Copies the tree elements of COMB to ensure that they are not shared.  */

void
unshare_aff_combination (aff_tree *comb)
{
  unsigned i;

  for (i = 0; i < comb->n; i++)
    comb->elts[i].val = unshare_expr (comb->elts[i].val);
  if (comb->rest)
    comb->rest = unshare_expr (comb->rest);
}
/* Removes the M-th element from COMB.  */

void
aff_combination_remove_elt (aff_tree *comb, unsigned m)
{
  comb->n--;
  if (m <= comb->n)
    comb->elts[m] = comb->elts[comb->n];
  if (comb->rest)
    {
      comb->elts[comb->n].coef = double_int_one;
      comb->elts[comb->n].val = comb->rest;
      comb->rest = NULL_TREE;
      comb->n++;
    }
}
/* Adds C * COEF * VAL to R.  VAL may be NULL, in that case only
   C * COEF is added to R.  */

static void
aff_combination_add_product (aff_tree *c, double_int coef, tree val,
                             aff_tree *r)
{
  unsigned i;
  tree aval, type;

  for (i = 0; i < c->n; i++)
    {
      aval = c->elts[i].val;
      if (val)
        {
          type = TREE_TYPE (aval);
          aval = fold_build2 (MULT_EXPR, type, aval,
                              fold_convert (type, val));
        }

      aff_combination_add_elt (r, aval,
                               double_int_mul (coef, c->elts[i].coef));
    }

  if (c->rest)
    {
      aval = c->rest;
      if (val)
        {
          type = TREE_TYPE (aval);
          aval = fold_build2 (MULT_EXPR, type, aval,
                              fold_convert (type, val));
        }

      aff_combination_add_elt (r, aval, coef);
    }

  if (val)
    aff_combination_add_elt (r, val,
                             double_int_mul (coef, c->offset));
  else
    aff_combination_add_cst (r, double_int_mul (coef, c->offset));
}
/* Multiplies C1 by C2, storing the result to R.  */

void
aff_combination_mult (aff_tree *c1, aff_tree *c2, aff_tree *r)
{
  unsigned i;
  gcc_assert (TYPE_PRECISION (c1->type) == TYPE_PRECISION (c2->type));

  aff_combination_zero (r, c1->type);

  for (i = 0; i < c2->n; i++)
    aff_combination_add_product (c1, c2->elts[i].coef, c2->elts[i].val, r);
  if (c2->rest)
    aff_combination_add_product (c1, double_int_one, c2->rest, r);
  aff_combination_add_product (c1, c2->offset, NULL, r);
}
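
/* For example, multiplying (x + 1) by (2*y + 3) yields
   2*(x*y) + 3*x + 2*y + 3; products of two non-constant elements, such
   as x*y here, become new elements whose value is a MULT_EXPR tree.  */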
/* Returns the element of COMB whose value is VAL, or NULL if no such
   element exists.  If IDX is not NULL, it is set to the index of VAL in
   COMB.  */

static struct aff_comb_elt *
aff_combination_find_elt (aff_tree *comb, tree val, unsigned *idx)
{
  unsigned i;

  for (i = 0; i < comb->n; i++)
    if (operand_equal_p (comb->elts[i].val, val, 0))
      {
        if (idx)
          *idx = i;

        return &comb->elts[i];
      }

  return NULL;
}
/* Element of the cache that maps ssa name NAME to its expanded form
   as an affine expression EXPANSION.  */

struct name_expansion
{
  aff_tree expansion;

  /* True if the expansion for the name is just being generated.  */
  unsigned in_progress : 1;
};
/* Expands SSA names in COMB recursively.  CACHE is used to cache the
   results.  */

void
aff_combination_expand (aff_tree *comb ATTRIBUTE_UNUSED,
                        struct pointer_map_t **cache ATTRIBUTE_UNUSED)
{
  unsigned i;
  aff_tree to_add, current, curre;
  tree e, rhs;
  gimple def;
  double_int scale;
  void **slot;
  struct name_expansion *exp;

  aff_combination_zero (&to_add, comb->type);
  for (i = 0; i < comb->n; i++)
    {
      tree type, name;
      enum tree_code code;

      e = comb->elts[i].val;
      type = TREE_TYPE (e);
      name = e;
      /* Look through some conversions.  */
      if (TREE_CODE (e) == NOP_EXPR
          && (TYPE_PRECISION (type)
              >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (e, 0)))))
        name = TREE_OPERAND (e, 0);
      if (TREE_CODE (name) != SSA_NAME)
        continue;
      def = SSA_NAME_DEF_STMT (name);
      if (!is_gimple_assign (def) || gimple_assign_lhs (def) != name)
        continue;

      code = gimple_assign_rhs_code (def);
      if (code != SSA_NAME
          && !IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
          && (get_gimple_rhs_class (code) != GIMPLE_SINGLE_RHS
              || !is_gimple_min_invariant (gimple_assign_rhs1 (def))))
        continue;

      /* We do not know whether the reference retains its value at the
         place where the expansion is used.  */
      if (TREE_CODE_CLASS (code) == tcc_reference)
        continue;

      if (!*cache)
        *cache = pointer_map_create ();
      slot = pointer_map_insert (*cache, e);
      exp = (struct name_expansion *) *slot;

      if (!exp)
        {
          exp = XNEW (struct name_expansion);
          exp->in_progress = 1;
          *slot = exp;

          /* In principle this is a generally valid folding, but
             it is not unconditionally an optimization, so do it
             here and not in fold_unary.  */
          /* Convert (T1)(X *+- CST) into (T1)X *+- (T1)CST if T1 is wider
             than the type of X and overflow for the type of X is
             undefined.  */
          if (e != name
              && INTEGRAL_TYPE_P (type)
              && INTEGRAL_TYPE_P (TREE_TYPE (name))
              && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (name))
              && TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (name))
              && (code == PLUS_EXPR || code == MINUS_EXPR || code == MULT_EXPR)
              && TREE_CODE (gimple_assign_rhs2 (def)) == INTEGER_CST)
            rhs = fold_build2 (code, type,
                               fold_convert (type, gimple_assign_rhs1 (def)),
                               fold_convert (type, gimple_assign_rhs2 (def)));
          else
            {
              rhs = gimple_assign_rhs_to_tree (def);
              if (e != name)
                rhs = fold_convert (type, rhs);
            }
          tree_to_aff_combination_expand (rhs, comb->type, &current, cache);
          exp->expansion = current;
          exp->in_progress = 0;
        }
      else
        {
          /* Since we follow the definitions in the SSA form, we should not
             enter a cycle unless we pass through a phi node.  */
          gcc_assert (!exp->in_progress);
          current = exp->expansion;
        }

      /* Accumulate the new terms to TO_ADD, so that we do not modify
         COMB while traversing it; include the term -coef * E, to remove
         it from COMB.  */
      scale = comb->elts[i].coef;
      aff_combination_zero (&curre, comb->type);
      aff_combination_add_elt (&curre, e, double_int_neg (scale));
      aff_combination_scale (&current, scale);
      aff_combination_add (&to_add, &current);
      aff_combination_add (&to_add, &curre);
    }
  aff_combination_add (comb, &to_add);
}
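
/* E.g. given the definitions a_1 = b_2 + 1 and c_3 = a_1 * 2, expanding
   the combination 1*c_3 yields 2*b_2 + 2: the term -1*c_3 accumulated in
   CURRE removes the original element, and CURRENT adds its scaled
   expansion.  */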
/* Similar to tree_to_aff_combination, but follows SSA name definitions
   and expands them recursively.  CACHE is used to cache the expansions
   of the ssa names, to avoid exponential time complexity for cases
   like

   D.1 = A + 1; D.2 = D.1 + 1; D.3 = D.2 + 1; ...

   It is necessary to call free_affine_expand_cache to free the cache.  */

void
tree_to_aff_combination_expand (tree expr, tree type, aff_tree *comb,
                                struct pointer_map_t **cache)
{
  tree_to_aff_combination (expr, type, comb);
  aff_combination_expand (comb, cache);
}
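
/* Typical usage; the cache is created lazily on the first expansion and
   must be released with free_affine_expand_cache:

     struct pointer_map_t *cache = NULL;
     aff_tree comb;

     tree_to_aff_combination_expand (expr, type, &comb, &cache);
     ... use or compare COMB ...
     free_affine_expand_cache (&cache);
*/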
/* Frees memory occupied by struct name_expansion in *VALUE.  Callback for
   pointer_map_traverse.  */

static bool
free_name_expansion (const void *key ATTRIBUTE_UNUSED, void **value,
                     void *data ATTRIBUTE_UNUSED)
{
  struct name_expansion *const exp = (struct name_expansion *) *value;

  free (exp);
  return true;
}
/* Frees memory allocated for the CACHE used by
   tree_to_aff_combination_expand.  */

void
free_affine_expand_cache (struct pointer_map_t **cache)
{
  if (!*cache)
    return;

  pointer_map_traverse (*cache, free_name_expansion, NULL);
  pointer_map_destroy (*cache);
  *cache = NULL;
}
/* If VAL != CST * DIV for any constant CST, returns false.
   Otherwise, if VAL != 0 (and hence CST != 0), and *MULT_SET is true,
   additionally compares CST and MULT, and if they are different,
   returns false.  Finally, if neither of these two cases occur,
   true is returned, and if CST != 0, CST is stored to MULT and
   MULT_SET is set to true.  */

static bool
double_int_constant_multiple_p (double_int val, double_int div,
                                bool *mult_set, double_int *mult)
{
  double_int rem, cst;

  if (double_int_zero_p (val))
    return true;

  if (double_int_zero_p (div))
    return false;

  cst = double_int_sdivmod (val, div, FLOOR_DIV_EXPR, &rem);
  if (!double_int_zero_p (rem))
    return false;

  if (*mult_set && !double_int_equal_p (*mult, cst))
    return false;

  *mult_set = true;
  *mult = cst;
  return true;
}
/* Returns true if VAL = X * DIV for some constant X.  If this is the case,
   X is stored to MULT.  */

bool
aff_combination_constant_multiple_p (aff_tree *val, aff_tree *div,
                                     double_int *mult)
{
  bool mult_set = false;
  unsigned i;

  if (val->n == 0 && double_int_zero_p (val->offset))
    {
      *mult = double_int_zero;
      return true;
    }
  if (val->n != div->n)
    return false;

  if (val->rest || div->rest)
    return false;

  if (!double_int_constant_multiple_p (val->offset, div->offset,
                                       &mult_set, mult))
    return false;

  for (i = 0; i < div->n; i++)
    {
      struct aff_comb_elt *elt
        = aff_combination_find_elt (val, div->elts[i].val, NULL);
      if (!elt)
        return false;
      if (!double_int_constant_multiple_p (elt->coef, div->elts[i].coef,
                                           &mult_set, mult))
        return false;
    }

  gcc_assert (mult_set);
  return true;
}
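
/* For instance, VAL = 6*x + 9 and DIV = 2*x + 3 give MULT = 3, since the
   offset and every matching coefficient of VAL are the same constant
   multiple of their counterparts in DIV.  */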
/* Prints the affine VAL to the FILE.  */

void
print_aff (FILE *file, aff_tree *val)
{
  unsigned i;
  bool uns = TYPE_UNSIGNED (val->type);
  if (POINTER_TYPE_P (val->type))
    uns = false;
  fprintf (file, "{\n  type = ");
  print_generic_expr (file, val->type, TDF_VOPS|TDF_MEMSYMS);
  fprintf (file, "\n  offset = ");
  dump_double_int (file, val->offset, uns);
  if (val->n > 0)
    {
      fprintf (file, "\n  elements = {\n");
      for (i = 0; i < val->n; i++)
        {
          fprintf (file, "    [%d] = ", i);
          print_generic_expr (file, val->elts[i].val, TDF_VOPS|TDF_MEMSYMS);

          fprintf (file, " * ");
          dump_double_int (file, val->elts[i].coef, uns);
          if (i != val->n - 1)
            fprintf (file, ", \n");
        }
      fprintf (file, "\n  }");
    }
  if (val->rest)
    {
      fprintf (file, "\n  rest = ");
      print_generic_expr (file, val->rest, TDF_VOPS|TDF_MEMSYMS);
    }
  fprintf (file, "\n}");
}
/* Prints the affine VAL to the standard error, used for debugging.  */

DEBUG_FUNCTION void
debug_aff (aff_tree *val)
{
  print_aff (stderr, val);
  fprintf (stderr, "\n");
}
/* Returns address of the reference REF in ADDR.  The size of the accessed
   location is stored to SIZE.  */

void
get_inner_reference_aff (tree ref, aff_tree *addr, double_int *size)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree toff;
  enum machine_mode mode;
  int uns, vol;
  aff_tree tmp;
  tree base = get_inner_reference (ref, &bitsize, &bitpos, &toff, &mode,
                                   &uns, &vol, false);
  tree base_addr = build_fold_addr_expr (base);

  /* ADDR = &BASE + TOFF + BITPOS / BITS_PER_UNIT.  */

  tree_to_aff_combination (base_addr, sizetype, addr);

  if (toff)
    {
      tree_to_aff_combination (toff, sizetype, &tmp);
      aff_combination_add (addr, &tmp);
    }

  aff_combination_const (&tmp, sizetype,
                         shwi_to_double_int (bitpos / BITS_PER_UNIT));
  aff_combination_add (addr, &tmp);

  *size = shwi_to_double_int ((bitsize + BITS_PER_UNIT - 1) / BITS_PER_UNIT);
}
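
/* E.g. for REF = a.f, with field F at byte offset 4 within A, ADDR
   becomes &a + 4, and SIZE is the byte size of F, rounded up from bits
   by the final division.  */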