/* Operations with affine combinations of trees.
   Copyright (C) 2005-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "tree-pretty-print.h"
#include "pointer-set.h"
#include "tree-affine.h"
#include "gimple.h"
#include "flags.h"
#include "dumpfile.h"
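
/* An affine combination COMB represents the value

     COMB->offset + sum (COMB->elts[i].coef * COMB->elts[i].val)
		  + COMB->rest

   in COMB->type.  As an informal example (the variable names are only
   illustrative), assuming 64-bit pointers, the address p + 4 * i + 8
   would be represented with offset = 8 and the two elements
   {val = p, coef = 1} and {val = i, coef = 4}, with rest == NULL_TREE.  */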

/* Extends CST as appropriate for the affine combinations COMB.  */

double_int
double_int_ext_for_comb (double_int cst, aff_tree *comb)
{
  return cst.sext (TYPE_PRECISION (comb->type));
}
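
/* For instance, if COMB->type has precision 8, the constant 0xff is
   sign-extended to -1; coefficient arithmetic therefore wraps exactly as
   it would in the 8-bit type itself.  */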

/* Initializes affine combination COMB so that its value is zero in TYPE.  */

static void
aff_combination_zero (aff_tree *comb, tree type)
{
  comb->type = type;
  comb->offset = double_int_zero;
  comb->n = 0;
  comb->rest = NULL_TREE;
}

/* Sets COMB to CST.  */

void
aff_combination_const (aff_tree *comb, tree type, double_int cst)
{
  aff_combination_zero (comb, type);
  comb->offset = double_int_ext_for_comb (cst, comb);
}

/* Sets COMB to single element ELT.  */

void
aff_combination_elt (aff_tree *comb, tree type, tree elt)
{
  aff_combination_zero (comb, type);

  comb->n = 1;
  comb->elts[0].val = elt;
  comb->elts[0].coef = double_int_one;
}

/* Scales COMB by SCALE.  */

void
aff_combination_scale (aff_tree *comb, double_int scale)
{
  unsigned i, j;

  scale = double_int_ext_for_comb (scale, comb);
  if (scale.is_one ())
    return;

  if (scale.is_zero ())
    {
      aff_combination_zero (comb, comb->type);
      return;
    }

  comb->offset
    = double_int_ext_for_comb (scale * comb->offset, comb);
  for (i = 0, j = 0; i < comb->n; i++)
    {
      double_int new_coef;

      new_coef
	= double_int_ext_for_comb (scale * comb->elts[i].coef, comb);
      /* A coefficient may become zero due to overflow.  Remove the zero
	 elements.  */
      if (new_coef.is_zero ())
	continue;
      comb->elts[j].coef = new_coef;
      comb->elts[j].val = comb->elts[i].val;
      j++;
    }
  comb->n = j;

  if (comb->rest)
    {
      tree type = comb->type;
      if (POINTER_TYPE_P (type))
	type = sizetype;
      if (comb->n < MAX_AFF_ELTS)
	{
	  comb->elts[comb->n].coef = scale;
	  comb->elts[comb->n].val = comb->rest;
	  comb->rest = NULL_TREE;
	  comb->n++;
	}
      else
	comb->rest = fold_build2 (MULT_EXPR, type, comb->rest,
				  double_int_to_tree (type, scale));
    }
}
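
/* For example, scaling the combination 3 + 2 * x by 4 yields 12 + 8 * x,
   while scaling by 1 returns immediately and scaling by 0 resets the
   combination to zero, matching the corresponding arithmetic
   identities.  */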

/* Adds ELT * SCALE to COMB.  */

void
aff_combination_add_elt (aff_tree *comb, tree elt, double_int scale)
{
  unsigned i;
  tree type;

  scale = double_int_ext_for_comb (scale, comb);
  if (scale.is_zero ())
    return;

  for (i = 0; i < comb->n; i++)
    if (operand_equal_p (comb->elts[i].val, elt, 0))
      {
	double_int new_coef;

	new_coef = comb->elts[i].coef + scale;
	new_coef = double_int_ext_for_comb (new_coef, comb);
	if (!new_coef.is_zero ())
	  {
	    comb->elts[i].coef = new_coef;
	    return;
	  }

	comb->n--;
	comb->elts[i] = comb->elts[comb->n];

	if (comb->rest)
	  {
	    gcc_assert (comb->n == MAX_AFF_ELTS - 1);
	    comb->elts[comb->n].coef = double_int_one;
	    comb->elts[comb->n].val = comb->rest;
	    comb->rest = NULL_TREE;
	    comb->n++;
	  }
	return;
      }
  if (comb->n < MAX_AFF_ELTS)
    {
      comb->elts[comb->n].coef = scale;
      comb->elts[comb->n].val = elt;
      comb->n++;
      return;
    }

  type = comb->type;
  if (POINTER_TYPE_P (type))
    type = sizetype;

  if (scale.is_one ())
    elt = fold_convert (type, elt);
  else
    elt = fold_build2 (MULT_EXPR, type,
		       fold_convert (type, elt),
		       double_int_to_tree (type, scale));

  if (comb->rest)
    comb->rest = fold_build2 (PLUS_EXPR, type, comb->rest, elt);
  else
    comb->rest = elt;
}

/* Adds CST to C.  */

static void
aff_combination_add_cst (aff_tree *c, double_int cst)
{
  c->offset = double_int_ext_for_comb (c->offset + cst, c);
}

/* Adds COMB2 to COMB1.  */

void
aff_combination_add (aff_tree *comb1, aff_tree *comb2)
{
  unsigned i;

  aff_combination_add_cst (comb1, comb2->offset);
  for (i = 0; i < comb2->n; i++)
    aff_combination_add_elt (comb1, comb2->elts[i].val, comb2->elts[i].coef);
  if (comb2->rest)
    aff_combination_add_elt (comb1, comb2->rest, double_int_one);
}
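
/* For example, adding 5 - 3 * x to 2 + 3 * x leaves only the constant 7:
   the coefficients of x sum to zero, so aff_combination_add_elt drops the
   element entirely.  */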

/* Converts affine combination COMB to TYPE.  */

void
aff_combination_convert (aff_tree *comb, tree type)
{
  unsigned i, j;
  tree comb_type = comb->type;

  if (TYPE_PRECISION (type) > TYPE_PRECISION (comb_type))
    {
      tree val = fold_convert (type, aff_combination_to_tree (comb));
      tree_to_aff_combination (val, type, comb);
      return;
    }

  comb->type = type;
  if (comb->rest && !POINTER_TYPE_P (type))
    comb->rest = fold_convert (type, comb->rest);

  if (TYPE_PRECISION (type) == TYPE_PRECISION (comb_type))
    return;

  comb->offset = double_int_ext_for_comb (comb->offset, comb);
  for (i = j = 0; i < comb->n; i++)
    {
      double_int new_coef = double_int_ext_for_comb (comb->elts[i].coef, comb);
      if (new_coef.is_zero ())
	continue;
      comb->elts[j].coef = new_coef;
      comb->elts[j].val = fold_convert (type, comb->elts[i].val);
      j++;
    }

  comb->n = j;
  if (comb->n < MAX_AFF_ELTS && comb->rest)
    {
      comb->elts[comb->n].coef = double_int_one;
      comb->elts[comb->n].val = comb->rest;
      comb->rest = NULL_TREE;
      comb->n++;
    }
}

/* Splits EXPR into an affine combination of parts.  */

void
tree_to_aff_combination (tree expr, tree type, aff_tree *comb)
{
  aff_tree tmp;
  enum tree_code code;
  tree cst, core, toffset;
  HOST_WIDE_INT bitpos, bitsize;
  enum machine_mode mode;
  int unsignedp, volatilep;

  STRIP_NOPS (expr);

  code = TREE_CODE (expr);
  switch (code)
    {
    case INTEGER_CST:
      aff_combination_const (comb, type, tree_to_double_int (expr));
      return;

    case POINTER_PLUS_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
      aff_combination_add (comb, &tmp);
      return;

    case PLUS_EXPR:
    case MINUS_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), type, &tmp);
      if (code == MINUS_EXPR)
	aff_combination_scale (&tmp, double_int_minus_one);
      aff_combination_add (comb, &tmp);
      return;

    case MULT_EXPR:
      cst = TREE_OPERAND (expr, 1);
      if (TREE_CODE (cst) != INTEGER_CST)
	break;
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, tree_to_double_int (cst));
      return;

    case NEGATE_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, double_int_minus_one);
      return;

    case BIT_NOT_EXPR:
      /* ~x = -x - 1  */
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, double_int_minus_one);
      aff_combination_add_cst (comb, double_int_minus_one);
      return;

    case ADDR_EXPR:
      /* Handle &MEM[ptr + CST] which is equivalent to POINTER_PLUS_EXPR.  */
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == MEM_REF)
	{
	  expr = TREE_OPERAND (expr, 0);
	  tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
	  tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
	  aff_combination_add (comb, &tmp);
	  return;
	}
      core = get_inner_reference (TREE_OPERAND (expr, 0), &bitsize, &bitpos,
				  &toffset, &mode, &unsignedp, &volatilep,
				  false);
      if (bitpos % BITS_PER_UNIT != 0)
	break;
      aff_combination_const (comb, type,
			     double_int::from_uhwi (bitpos / BITS_PER_UNIT));
      core = build_fold_addr_expr (core);
      if (TREE_CODE (core) == ADDR_EXPR)
	aff_combination_add_elt (comb, core, double_int_one);
      else
	{
	  tree_to_aff_combination (core, type, &tmp);
	  aff_combination_add (comb, &tmp);
	}
      if (toffset)
	{
	  tree_to_aff_combination (toffset, type, &tmp);
	  aff_combination_add (comb, &tmp);
	}
      return;

    case MEM_REF:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR)
	tree_to_aff_combination (TREE_OPERAND (TREE_OPERAND (expr, 0), 0),
				 type, comb);
      else if (integer_zerop (TREE_OPERAND (expr, 1)))
	{
	  aff_combination_elt (comb, type, expr);
	  return;
	}
      else
	aff_combination_elt (comb, type,
			     build2 (MEM_REF, TREE_TYPE (expr),
				     TREE_OPERAND (expr, 0),
				     build_int_cst
				       (TREE_TYPE (TREE_OPERAND (expr, 1)), 0)));
      tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
      aff_combination_add (comb, &tmp);
      return;

    default:
      break;
    }

  aff_combination_elt (comb, type, expr);
}
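
/* As an informal example (the names are only illustrative), splitting
   the POINTER_PLUS_EXPR p + (4 * i + 8) yields the combination
   8 + 1 * p + 4 * i: the recursion pushes the constant into the offset,
   keeps the scaled index as an element, and makes the base pointer an
   element with coefficient one.  */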

/* Creates EXPR + ELT * SCALE in TYPE.  EXPR is taken from affine
   combination COMB.  */

static tree
add_elt_to_tree (tree expr, tree type, tree elt, double_int scale,
		 aff_tree *comb)
{
  enum tree_code code;
  tree type1 = type;
  if (POINTER_TYPE_P (type))
    type1 = sizetype;

  scale = double_int_ext_for_comb (scale, comb);

  if (scale.is_minus_one ()
      && POINTER_TYPE_P (TREE_TYPE (elt)))
    {
      elt = convert_to_ptrofftype (elt);
      elt = fold_build1 (NEGATE_EXPR, TREE_TYPE (elt), elt);
      scale = double_int_one;
    }

  if (scale.is_one ())
    {
      if (!expr)
	{
	  if (POINTER_TYPE_P (TREE_TYPE (elt)))
	    return elt;
	  else
	    return fold_convert (type1, elt);
	}

      if (POINTER_TYPE_P (TREE_TYPE (expr)))
	return fold_build_pointer_plus (expr, elt);
      if (POINTER_TYPE_P (TREE_TYPE (elt)))
	return fold_build_pointer_plus (elt, expr);
      return fold_build2 (PLUS_EXPR, type1,
			  expr, fold_convert (type1, elt));
    }

  if (scale.is_minus_one ())
    {
      if (!expr)
	return fold_build1 (NEGATE_EXPR, type1,
			    fold_convert (type1, elt));

      if (POINTER_TYPE_P (TREE_TYPE (expr)))
	{
	  elt = convert_to_ptrofftype (elt);
	  elt = fold_build1 (NEGATE_EXPR, TREE_TYPE (elt), elt);
	  return fold_build_pointer_plus (expr, elt);
	}
      return fold_build2 (MINUS_EXPR, type1,
			  expr, fold_convert (type1, elt));
    }

  elt = fold_convert (type1, elt);
  if (!expr)
    return fold_build2 (MULT_EXPR, type1, elt,
			double_int_to_tree (type1, scale));

  if (scale.is_negative ())
    {
      code = MINUS_EXPR;
      scale = -scale;
    }
  else
    code = PLUS_EXPR;

  elt = fold_build2 (MULT_EXPR, type1, elt,
		     double_int_to_tree (type1, scale));
  if (POINTER_TYPE_P (TREE_TYPE (expr)))
    {
      if (code == MINUS_EXPR)
	elt = fold_build1 (NEGATE_EXPR, type1, elt);
      return fold_build_pointer_plus (expr, elt);
    }
  return fold_build2 (code, type1, expr, elt);
}

/* Makes tree from the affine combination COMB.  */

tree
aff_combination_to_tree (aff_tree *comb)
{
  tree type = comb->type;
  tree expr = NULL_TREE;
  unsigned i;
  double_int off, sgn;
  tree type1 = type;
  if (POINTER_TYPE_P (type))
    type1 = sizetype;

  gcc_assert (comb->n == MAX_AFF_ELTS || comb->rest == NULL_TREE);

  for (i = 0; i < comb->n; i++)
    expr = add_elt_to_tree (expr, type, comb->elts[i].val, comb->elts[i].coef,
			    comb);

  if (comb->rest)
    expr = add_elt_to_tree (expr, type, comb->rest, double_int_one, comb);

  /* Ensure that we get x - 1, not x + (-1) or x + 0xff..f if x is
     unsigned.  */
  if (comb->offset.is_negative ())
    {
      off = -comb->offset;
      sgn = double_int_minus_one;
    }
  else
    {
      off = comb->offset;
      sgn = double_int_one;
    }
  return add_elt_to_tree (expr, type, double_int_to_tree (type1, off), sgn,
			  comb);
}
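
/* This is the inverse of tree_to_aff_combination up to folding; in
   particular, the sign handling above rebuilds the combination -1 + 1 * x
   as x - 1 rather than x + 0xff..f when x is unsigned.  */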

/* Copies the tree elements of COMB to ensure that they are not shared.  */

void
unshare_aff_combination (aff_tree *comb)
{
  unsigned i;

  for (i = 0; i < comb->n; i++)
    comb->elts[i].val = unshare_expr (comb->elts[i].val);
  if (comb->rest)
    comb->rest = unshare_expr (comb->rest);
}

/* Remove M-th element from COMB.  */

void
aff_combination_remove_elt (aff_tree *comb, unsigned m)
{
  comb->n--;
  if (m <= comb->n)
    comb->elts[m] = comb->elts[comb->n];
  if (comb->rest)
    {
      comb->elts[comb->n].coef = double_int_one;
      comb->elts[comb->n].val = comb->rest;
      comb->rest = NULL_TREE;
      comb->n++;
    }
}

/* Adds C * COEF * VAL to R.  VAL may be NULL, in that case only
   C * COEF is added to R.  */

static void
aff_combination_add_product (aff_tree *c, double_int coef, tree val,
			     aff_tree *r)
{
  unsigned i;
  tree aval, type;

  for (i = 0; i < c->n; i++)
    {
      aval = c->elts[i].val;
      if (val)
	{
	  type = TREE_TYPE (aval);
	  aval = fold_build2 (MULT_EXPR, type, aval,
			      fold_convert (type, val));
	}

      aff_combination_add_elt (r, aval, coef * c->elts[i].coef);
    }

  if (c->rest)
    {
      aval = c->rest;
      if (val)
	{
	  type = TREE_TYPE (aval);
	  aval = fold_build2 (MULT_EXPR, type, aval,
			      fold_convert (type, val));
	}

      aff_combination_add_elt (r, aval, coef);
    }

  if (val)
    aff_combination_add_elt (r, val, coef * c->offset);
  else
    aff_combination_add_cst (r, coef * c->offset);
}

/* Multiplies C1 by C2, storing the result to R.  */

void
aff_combination_mult (aff_tree *c1, aff_tree *c2, aff_tree *r)
{
  unsigned i;

  gcc_assert (TYPE_PRECISION (c1->type) == TYPE_PRECISION (c2->type));

  aff_combination_zero (r, c1->type);

  for (i = 0; i < c2->n; i++)
    aff_combination_add_product (c1, c2->elts[i].coef, c2->elts[i].val, r);
  if (c2->rest)
    aff_combination_add_product (c1, double_int_one, c2->rest, r);
  aff_combination_add_product (c1, c2->offset, NULL, r);
}
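
/* For example, multiplying x + 1 by y + 2 distributes to
   2 + 2 * x + 1 * y + 1 * (x * y): the product of two elements becomes a
   new element whose value is the folded MULT_EXPR tree built in
   aff_combination_add_product.  */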

/* Returns the element of COMB whose value is VAL, or NULL if no such
   element exists.  If IDX is not NULL, it is set to the index of VAL in
   COMB.  */

static struct aff_comb_elt *
aff_combination_find_elt (aff_tree *comb, tree val, unsigned *idx)
{
  unsigned i;

  for (i = 0; i < comb->n; i++)
    if (operand_equal_p (comb->elts[i].val, val, 0))
      {
	if (idx)
	  *idx = i;

	return &comb->elts[i];
      }

  return NULL;
}

/* Element of the cache that maps ssa name NAME to its expanded form
   as an affine expression EXPANSION.  */

struct name_expansion
{
  aff_tree expansion;

  /* True if the expansion for the name is just being generated.  */
  unsigned in_progress : 1;
};

/* Expands SSA names in COMB recursively.  CACHE is used to cache the
   results.  */

void
aff_combination_expand (aff_tree *comb ATTRIBUTE_UNUSED,
			struct pointer_map_t **cache ATTRIBUTE_UNUSED)
{
  unsigned i;
  aff_tree to_add, current, curre;
  tree e, rhs;
  gimple def;
  double_int scale;
  void **slot;
  struct name_expansion *exp;

  aff_combination_zero (&to_add, comb->type);
  for (i = 0; i < comb->n; i++)
    {
      tree type, name;
      enum tree_code code;

      e = comb->elts[i].val;
      type = TREE_TYPE (e);
      name = e;
      /* Look through some conversions.  */
      if (TREE_CODE (e) == NOP_EXPR
	  && (TYPE_PRECISION (type)
	      >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (e, 0)))))
	name = TREE_OPERAND (e, 0);
      if (TREE_CODE (name) != SSA_NAME)
	continue;
      def = SSA_NAME_DEF_STMT (name);
      if (!is_gimple_assign (def) || gimple_assign_lhs (def) != name)
	continue;

      code = gimple_assign_rhs_code (def);
      if (code != SSA_NAME
	  && !IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
	  && (get_gimple_rhs_class (code) != GIMPLE_SINGLE_RHS
	      || !is_gimple_min_invariant (gimple_assign_rhs1 (def))))
	continue;

      /* We do not know whether the reference retains its value at the
	 place where the expansion is used.  */
      if (TREE_CODE_CLASS (code) == tcc_reference)
	continue;

      if (!*cache)
	*cache = pointer_map_create ();
      slot = pointer_map_insert (*cache, e);
      exp = (struct name_expansion *) *slot;

      if (!exp)
	{
	  exp = XNEW (struct name_expansion);
	  exp->in_progress = 1;
	  *slot = exp;

	  /* In principle this is a generally valid folding, but
	     it is not unconditionally an optimization, so do it
	     here and not in fold_unary.  */
	  /* Convert (T1)(X *+- CST) into (T1)X *+- (T1)CST if T1 is wider
	     than the type of X and overflow for the type of X is
	     undefined.  */
	  if (e != name
	      && INTEGRAL_TYPE_P (type)
	      && INTEGRAL_TYPE_P (TREE_TYPE (name))
	      && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (name))
	      && TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (name))
	      && (code == PLUS_EXPR || code == MINUS_EXPR || code == MULT_EXPR)
	      && TREE_CODE (gimple_assign_rhs2 (def)) == INTEGER_CST)
	    rhs = fold_build2 (code, type,
			       fold_convert (type, gimple_assign_rhs1 (def)),
			       fold_convert (type, gimple_assign_rhs2 (def)));
	  else
	    {
	      rhs = gimple_assign_rhs_to_tree (def);
	      if (e != name)
		rhs = fold_convert (type, rhs);
	    }
	  tree_to_aff_combination_expand (rhs, comb->type, &current, cache);
	  exp->expansion = current;
	  exp->in_progress = 0;
	}
      else
	{
	  /* Since we follow the definitions in the SSA form, we should not
	     enter a cycle unless we pass through a phi node.  */
	  gcc_assert (!exp->in_progress);
	  current = exp->expansion;
	}

      /* Accumulate the new terms to TO_ADD, so that we do not modify
	 COMB while traversing it; include the term -coef * E, to remove
	 it from COMB.  */
      scale = comb->elts[i].coef;
      aff_combination_zero (&curre, comb->type);
      aff_combination_add_elt (&curre, e, -scale);
      aff_combination_scale (&current, scale);
      aff_combination_add (&to_add, &current);
      aff_combination_add (&to_add, &curre);
    }
  aff_combination_add (comb, &to_add);
}
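
/* As an informal example, if COMB contains 2 * a2 and the SSA form
   defines a2 = a1 + a1 and a1 = a0 + a0, the expansion rewrites COMB to
   8 * a0; the cache ensures that each name is expanded only once along
   such chains.  */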

/* Similar to tree_to_aff_combination, but follows SSA name definitions
   and expands them recursively.  CACHE is used to cache the expansions
   of the ssa names, to avoid exponential time complexity for cases
   like

   a1 = a0 + a0;
   a2 = a1 + a1;
   a3 = a2 + a2;
   ...  */

void
tree_to_aff_combination_expand (tree expr, tree type, aff_tree *comb,
				struct pointer_map_t **cache)
{
  tree_to_aff_combination (expr, type, comb);
  aff_combination_expand (comb, cache);
}

/* Frees memory occupied by struct name_expansion in *VALUE.  Callback for
   pointer_map_traverse.  */

static bool
free_name_expansion (const void *key ATTRIBUTE_UNUSED, void **value,
		     void *data ATTRIBUTE_UNUSED)
{
  struct name_expansion *const exp = (struct name_expansion *) *value;

  free (exp);
  return true;
}

/* Frees memory allocated for the CACHE used by
   tree_to_aff_combination_expand.  */

void
free_affine_expand_cache (struct pointer_map_t **cache)
{
  if (!*cache)
    return;

  pointer_map_traverse (*cache, free_name_expansion, NULL);
  pointer_map_destroy (*cache);
  *cache = NULL;
}

/* If VAL != CST * DIV for any constant CST, returns false.
   Otherwise, if *MULT_SET is true, additionally compares CST and MULT,
   and if they are different, returns false.  Finally, if neither of these
   two cases occur, true is returned, and CST is stored to MULT and MULT_SET
   is set to true.  */

static bool
double_int_constant_multiple_p (double_int val, double_int div,
				bool *mult_set, double_int *mult)
{
  double_int rem, cst;

  if (val.is_zero ())
    {
      if (*mult_set && !mult->is_zero ())
	return false;
      *mult_set = true;
      *mult = double_int_zero;
      return true;
    }

  if (div.is_zero ())
    return false;

  cst = val.sdivmod (div, FLOOR_DIV_EXPR, &rem);
  if (!rem.is_zero ())
    return false;

  if (*mult_set && *mult != cst)
    return false;

  *mult_set = true;
  *mult = cst;
  return true;
}
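
/* For instance, VAL = 12 and DIV = 4 succeed with *MULT set to 3;
   VAL = 9 and DIV = 4 fail on the nonzero remainder; and VAL = 8 with
   DIV = 4 fails if *MULT was previously set to 3, since the multipliers
   disagree.  */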

/* Returns true if VAL = X * DIV for some constant X.  If this is the case,
   X is stored to MULT.  */

bool
aff_combination_constant_multiple_p (aff_tree *val, aff_tree *div,
				     double_int *mult)
{
  bool mult_set = false;
  unsigned i;

  if (val->n == 0 && val->offset.is_zero ())
    {
      *mult = double_int_zero;
      return true;
    }
  if (val->n != div->n)
    return false;

  if (val->rest || div->rest)
    return false;

  if (!double_int_constant_multiple_p (val->offset, div->offset,
				       &mult_set, mult))
    return false;

  for (i = 0; i < div->n; i++)
    {
      struct aff_comb_elt *elt
	= aff_combination_find_elt (val, div->elts[i].val, NULL);
      if (!elt)
	return false;
      if (!double_int_constant_multiple_p (elt->coef, div->elts[i].coef,
					   &mult_set, mult))
	return false;
    }

  gcc_assert (mult_set);
  return true;
}
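
/* For example, VAL = 4 + 8 * x is the constant multiple 4 of
   DIV = 1 + 2 * x: the offsets and the coefficients of x divide with the
   same quotient, which is stored to MULT.  */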

/* Prints the affine VAL to the FILE.  */

static void
print_aff (FILE *file, aff_tree *val)
{
  unsigned i;
  bool uns = TYPE_UNSIGNED (val->type);
  if (POINTER_TYPE_P (val->type))
    uns = false;
  fprintf (file, "{\n  type = ");
  print_generic_expr (file, val->type, TDF_VOPS|TDF_MEMSYMS);
  fprintf (file, "\n  offset = ");
  dump_double_int (file, val->offset, uns);
  if (val->n > 0)
    {
      fprintf (file, "\n  elements = {\n");
      for (i = 0; i < val->n; i++)
	{
	  fprintf (file, "    [%d] = ", i);
	  print_generic_expr (file, val->elts[i].val, TDF_VOPS|TDF_MEMSYMS);

	  fprintf (file, " * ");
	  dump_double_int (file, val->elts[i].coef, uns);
	  if (i != val->n - 1)
	    fprintf (file, ", \n");
	}
      fprintf (file, "\n  }");
    }
  if (val->rest)
    {
      fprintf (file, "\n  rest = ");
      print_generic_expr (file, val->rest, TDF_VOPS|TDF_MEMSYMS);
    }
  fprintf (file, "\n}");
}

/* Prints the affine VAL to the standard error, used for debugging.  */

DEBUG_FUNCTION void
debug_aff (aff_tree *val)
{
  print_aff (stderr, val);
  fprintf (stderr, "\n");
}

/* Returns address of the reference REF in ADDR.  The size of the accessed
   location is stored to SIZE.  */

void
get_inner_reference_aff (tree ref, aff_tree *addr, double_int *size)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree toff;
  enum machine_mode mode;
  int uns, vol;
  aff_tree tmp;
  tree base = get_inner_reference (ref, &bitsize, &bitpos, &toff, &mode,
				   &uns, &vol, false);
  tree base_addr = build_fold_addr_expr (base);

  /* ADDR = &BASE + TOFF + BITPOS / BITS_PER_UNIT.  */

  tree_to_aff_combination (base_addr, sizetype, addr);

  if (toff)
    {
      tree_to_aff_combination (toff, sizetype, &tmp);
      aff_combination_add (addr, &tmp);
    }

  aff_combination_const (&tmp, sizetype,
			 double_int::from_shwi (bitpos / BITS_PER_UNIT));
  aff_combination_add (addr, &tmp);

  *size = double_int::from_shwi ((bitsize + BITS_PER_UNIT - 1) / BITS_PER_UNIT);
}
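
/* As an informal example, assuming BITS_PER_UNIT is 8, for a
   COMPONENT_REF a.f whose field sits at bit offset 64 this computes
   ADDR = &a + 8 in sizetype, and SIZE as the byte size of the access,
   rounded up from BITSIZE.  */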

/* Returns true if a region of size SIZE1 at position 0 and a region of
   size SIZE2 at position DIFF cannot overlap.  */

bool
aff_comb_cannot_overlap_p (aff_tree *diff, double_int size1, double_int size2)
{
  double_int d, bound;

  /* Unless the difference is a constant, we fail.  */
  if (diff->n != 0)
    return false;

  d = diff->offset;
  if (d.is_negative ())
    {
      /* The second object is before the first one, we succeed if the last
	 element of the second object is before the start of the first one.  */
      bound = d + size2 + double_int_minus_one;
      return bound.is_negative ();
    }
  else
    {
      /* We succeed if the second object starts after the first one ends.  */
      return size1.sle (d);
    }
}
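
/* As a quick sanity check of the bounds: with DIFF the constant 16,
   SIZE1 = 8 and SIZE2 = 8, the second region [16, 24) starts after the
   first one [0, 8) ends, so the function returns true; with DIFF = 4 the
   regions [0, 8) and [4, 12) overlap and it returns false.  */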