/* Operations with affine combinations of trees.
   Copyright (C) 2005-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "tree-pretty-print.h"
#include "fold-const.h"
#include "tree-affine.h"
#include "cfgexpand.h"

/* Extends CST as appropriate for the affine combination COMB.  */

static widest_int
wide_int_ext_for_comb (const widest_int &cst, aff_tree *comb)
{
  return wi::sext (cst, TYPE_PRECISION (comb->type));
}

/* Initializes affine combination COMB so that its value is zero in TYPE.  */

static void
aff_combination_zero (aff_tree *comb, tree type)
{
  int i;

  comb->type = type;
  comb->offset = 0;
  comb->n = 0;
  for (i = 0; i < MAX_AFF_ELTS; i++)
    comb->elts[i].coef = 0;
  comb->rest = NULL_TREE;
}

/* Sets COMB to CST.  */

void
aff_combination_const (aff_tree *comb, tree type, const widest_int &cst)
{
  aff_combination_zero (comb, type);
  comb->offset = wide_int_ext_for_comb (cst, comb);
}

/* Sets COMB to single element ELT.  */

void
aff_combination_elt (aff_tree *comb, tree type, tree elt)
{
  aff_combination_zero (comb, type);

  comb->n = 1;
  comb->elts[0].val = elt;
  comb->elts[0].coef = 1;
}

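/* Illustrative usage sketch: building the combination 3*x + 5 from the
   primitives in this file, assuming X is a tree of integral type TYPE:

     aff_tree c;
     aff_combination_elt (&c, type, x);        c = x
     aff_combination_scale (&c, 3);            c = 3*x
     aff_combination_add_cst (&c, 5);          c = 3*x + 5

   aff_combination_scale and aff_combination_add_cst are defined below.  */
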
/* Scales COMB by SCALE.  */

void
aff_combination_scale (aff_tree *comb, const widest_int &scale_in)
{
  unsigned i, j;

  widest_int scale = wide_int_ext_for_comb (scale_in, comb);
  if (scale == 1)
    return;

  if (scale == 0)
    {
      aff_combination_zero (comb, comb->type);
      return;
    }

  comb->offset = wide_int_ext_for_comb (scale * comb->offset, comb);
  for (i = 0, j = 0; i < comb->n; i++)
    {
      widest_int new_coef
	= wide_int_ext_for_comb (scale * comb->elts[i].coef, comb);
      /* A coefficient may become zero due to overflow.  Remove the zero
	 elements.  */
      if (new_coef == 0)
	continue;
      comb->elts[j].coef = new_coef;
      comb->elts[j].val = comb->elts[i].val;
      j++;
    }
  comb->n = j;

  if (comb->rest)
    {
      tree type = comb->type;
      if (POINTER_TYPE_P (type))
	type = sizetype;
      if (comb->n < MAX_AFF_ELTS)
	{
	  comb->elts[comb->n].coef = scale;
	  comb->elts[comb->n].val = comb->rest;
	  comb->rest = NULL_TREE;
	  comb->n++;
	}
      else
	comb->rest = fold_build2 (MULT_EXPR, type, comb->rest,
				  wide_int_to_tree (type, scale));
    }
}

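/* Worked example of the wrapping behaviour above (illustrative): in an
   8-bit type, scaling the element 100*x by 3 gives a raw coefficient of
   300, which wide_int_ext_for_comb reduces to 300 - 256 = 44, so the
   result represents 44*x modulo 256.  A coefficient that wraps to
   exactly zero is dropped from the element array.  */
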
/* Adds ELT * SCALE to COMB.  */

void
aff_combination_add_elt (aff_tree *comb, tree elt, const widest_int &scale_in)
{
  unsigned i;
  tree type;

  widest_int scale = wide_int_ext_for_comb (scale_in, comb);
  if (scale == 0)
    return;

  for (i = 0; i < comb->n; i++)
    if (operand_equal_p (comb->elts[i].val, elt, 0))
      {
	widest_int new_coef
	  = wide_int_ext_for_comb (comb->elts[i].coef + scale, comb);
	if (new_coef != 0)
	  {
	    comb->elts[i].coef = new_coef;
	    return;
	  }

	comb->n--;
	comb->elts[i] = comb->elts[comb->n];

	if (comb->rest)
	  {
	    gcc_assert (comb->n == MAX_AFF_ELTS - 1);
	    comb->elts[comb->n].coef = 1;
	    comb->elts[comb->n].val = comb->rest;
	    comb->rest = NULL_TREE;
	    comb->n++;
	  }
	return;
      }

  if (comb->n < MAX_AFF_ELTS)
    {
      comb->elts[comb->n].coef = scale;
      comb->elts[comb->n].val = elt;
      comb->n++;
      return;
    }

  type = comb->type;
  if (POINTER_TYPE_P (type))
    type = sizetype;

  if (scale == 1)
    elt = fold_convert (type, elt);
  else
    elt = fold_build2 (MULT_EXPR, type,
		       fold_convert (type, elt),
		       wide_int_to_tree (type, scale));

  if (comb->rest)
    comb->rest = fold_build2 (PLUS_EXPR, type, comb->rest,
			      elt);
  else
    comb->rest = elt;
}

/* Adds CST to C.  */

void
aff_combination_add_cst (aff_tree *c, const widest_int &cst)
{
  c->offset = wide_int_ext_for_comb (c->offset + cst, c);
}

/* Adds COMB2 to COMB1.  */

void
aff_combination_add (aff_tree *comb1, aff_tree *comb2)
{
  unsigned i;

  aff_combination_add_cst (comb1, comb2->offset);
  for (i = 0; i < comb2->n; i++)
    aff_combination_add_elt (comb1, comb2->elts[i].val, comb2->elts[i].coef);
  if (comb2->rest)
    aff_combination_add_elt (comb1, comb2->rest, 1);
}

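/* Illustrative sketch: adding {2*a + 3} to {a + 4*b + 1} with
   aff_combination_add merges the matching element A and sums the
   offsets, giving {3*a + 4*b + 4}; an element whose coefficient cancels
   to zero is removed by aff_combination_add_elt.  */
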
/* Converts affine combination COMB to TYPE.  */

void
aff_combination_convert (aff_tree *comb, tree type)
{
  unsigned i, j;
  tree comb_type = comb->type;

  if (TYPE_PRECISION (type) > TYPE_PRECISION (comb_type))
    {
      tree val = fold_convert (type, aff_combination_to_tree (comb));
      tree_to_aff_combination (val, type, comb);
      return;
    }

  comb->type = type;
  if (comb->rest && !POINTER_TYPE_P (type))
    comb->rest = fold_convert (type, comb->rest);

  if (TYPE_PRECISION (type) == TYPE_PRECISION (comb_type))
    return;

  comb->offset = wide_int_ext_for_comb (comb->offset, comb);
  for (i = j = 0; i < comb->n; i++)
    {
      if (comb->elts[i].coef == 0)
	continue;
      comb->elts[j].coef = comb->elts[i].coef;
      comb->elts[j].val = fold_convert (type, comb->elts[i].val);
      j++;
    }

  comb->n = j;
  if (comb->n < MAX_AFF_ELTS && comb->rest)
    {
      comb->elts[comb->n].coef = 1;
      comb->elts[comb->n].val = comb->rest;
      comb->rest = NULL_TREE;
      comb->n++;
    }
}

/* Splits EXPR into an affine combination of parts.  */

void
tree_to_aff_combination (tree expr, tree type, aff_tree *comb)
{
  aff_tree tmp;
  enum tree_code code;
  tree cst, core, toffset;
  HOST_WIDE_INT bitpos, bitsize;
  machine_mode mode;
  int unsignedp, reversep, volatilep;

  STRIP_NOPS (expr);

  code = TREE_CODE (expr);
  switch (code)
    {
    case INTEGER_CST:
      aff_combination_const (comb, type, wi::to_widest (expr));
      return;

    case POINTER_PLUS_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
      aff_combination_add (comb, &tmp);
      return;

    case PLUS_EXPR:
    case MINUS_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), type, &tmp);
      if (code == MINUS_EXPR)
	aff_combination_scale (&tmp, -1);
      aff_combination_add (comb, &tmp);
      return;

    case MULT_EXPR:
      cst = TREE_OPERAND (expr, 1);
      if (TREE_CODE (cst) != INTEGER_CST)
	break;
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, wi::to_widest (cst));
      return;

    case NEGATE_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, -1);
      return;

    case BIT_NOT_EXPR:
      /* ~x = -x - 1  */
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, -1);
      aff_combination_add_cst (comb, -1);
      return;

    case ADDR_EXPR:
      /* Handle &MEM[ptr + CST] which is equivalent to POINTER_PLUS_EXPR.  */
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == MEM_REF)
	{
	  expr = TREE_OPERAND (expr, 0);
	  tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
	  tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
	  aff_combination_add (comb, &tmp);
	  return;
	}
      core = get_inner_reference (TREE_OPERAND (expr, 0), &bitsize, &bitpos,
				  &toffset, &mode, &unsignedp, &reversep,
				  &volatilep, false);
      if (bitpos % BITS_PER_UNIT != 0)
	break;
      aff_combination_const (comb, type, bitpos / BITS_PER_UNIT);
      if (TREE_CODE (core) == MEM_REF)
	{
	  aff_combination_add_cst (comb, wi::to_widest (TREE_OPERAND (core, 1)));
	  core = TREE_OPERAND (core, 0);
	}
      else
	core = build_fold_addr_expr (core);

      if (TREE_CODE (core) == ADDR_EXPR)
	aff_combination_add_elt (comb, core, 1);
      else
	{
	  tree_to_aff_combination (core, type, &tmp);
	  aff_combination_add (comb, &tmp);
	}
      if (toffset)
	{
	  tree_to_aff_combination (toffset, type, &tmp);
	  aff_combination_add (comb, &tmp);
	}
      return;

    case MEM_REF:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR)
	tree_to_aff_combination (TREE_OPERAND (TREE_OPERAND (expr, 0), 0),
				 type, comb);
      else if (integer_zerop (TREE_OPERAND (expr, 1)))
	{
	  aff_combination_elt (comb, type, expr);
	  return;
	}
      else
	aff_combination_elt (comb, type,
			     build2 (MEM_REF, TREE_TYPE (expr),
				     TREE_OPERAND (expr, 0),
				     build_int_cst
				       (TREE_TYPE (TREE_OPERAND (expr, 1)), 0)));
      tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
      aff_combination_add (comb, &tmp);
      return;

    default:
      break;
    }

  aff_combination_elt (comb, type, expr);
}

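/* Illustrative sketch: for EXPR = a + 4*b + 7 of integral TYPE,
   tree_to_aff_combination produces a combination with elements
   {a with coefficient 1, b with coefficient 4} and offset 7; operands
   it cannot decompose further end up as single elements or in REST.  */
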
/* Creates EXPR + ELT * SCALE in TYPE.  EXPR is taken from affine
   combination COMB.  */

static tree
add_elt_to_tree (tree expr, tree type, tree elt, const widest_int &scale_in,
		 aff_tree *comb ATTRIBUTE_UNUSED)
{
  enum tree_code code;
  tree type1 = type;
  if (POINTER_TYPE_P (type))
    type1 = sizetype;

  widest_int scale = wide_int_ext_for_comb (scale_in, comb);

  if (scale == -1
      && POINTER_TYPE_P (TREE_TYPE (elt)))
    {
      elt = convert_to_ptrofftype (elt);
      elt = fold_build1 (NEGATE_EXPR, TREE_TYPE (elt), elt);
      scale = 1;
    }

  if (scale == 1)
    {
      if (!expr)
	{
	  if (POINTER_TYPE_P (TREE_TYPE (elt)))
	    return elt;
	  else
	    return fold_convert (type1, elt);
	}

      if (POINTER_TYPE_P (TREE_TYPE (expr)))
	return fold_build_pointer_plus (expr, elt);
      if (POINTER_TYPE_P (TREE_TYPE (elt)))
	return fold_build_pointer_plus (elt, expr);
      return fold_build2 (PLUS_EXPR, type1,
			  expr, fold_convert (type1, elt));
    }

  if (scale == -1)
    {
      if (!expr)
	return fold_build1 (NEGATE_EXPR, type1,
			    fold_convert (type1, elt));

      if (POINTER_TYPE_P (TREE_TYPE (expr)))
	{
	  elt = convert_to_ptrofftype (elt);
	  elt = fold_build1 (NEGATE_EXPR, TREE_TYPE (elt), elt);
	  return fold_build_pointer_plus (expr, elt);
	}
      return fold_build2 (MINUS_EXPR, type1,
			  expr, fold_convert (type1, elt));
    }

  elt = fold_convert (type1, elt);
  if (!expr)
    return fold_build2 (MULT_EXPR, type1, elt,
			wide_int_to_tree (type1, scale));

  if (wi::neg_p (scale))
    {
      code = MINUS_EXPR;
      scale = -scale;
    }
  else
    code = PLUS_EXPR;

  elt = fold_build2 (MULT_EXPR, type1, elt,
		     wide_int_to_tree (type1, scale));
  if (POINTER_TYPE_P (TREE_TYPE (expr)))
    {
      /* The final result should be a pointer.  */
      if (code == MINUS_EXPR)
	elt = fold_build1 (NEGATE_EXPR, type1, elt);
      return fold_build_pointer_plus (expr, elt);
    }

  return fold_build2 (code, type1, expr, elt);
}

/* Makes tree from the affine combination COMB.  */

tree
aff_combination_to_tree (aff_tree *comb)
{
  tree type = comb->type;
  tree expr = NULL_TREE;
  unsigned i;
  widest_int off, sgn;
  tree type1 = type;
  if (POINTER_TYPE_P (type))
    type1 = sizetype;

  gcc_assert (comb->n == MAX_AFF_ELTS || comb->rest == NULL_TREE);

  for (i = 0; i < comb->n; i++)
    expr = add_elt_to_tree (expr, type, comb->elts[i].val, comb->elts[i].coef,
			    comb);

  if (comb->rest)
    expr = add_elt_to_tree (expr, type, comb->rest, 1, comb);

  /* Ensure that we get x - 1, not x + (-1) or x + 0xff..f if x is
     unsigned.  */
  if (wi::neg_p (comb->offset))
    {
      off = -comb->offset;
      sgn = -1;
    }
  else
    {
      off = comb->offset;
      sgn = 1;
    }
  return add_elt_to_tree (expr, type, wide_int_to_tree (type1, off), sgn,
			  comb);
}

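/* Illustrative usage sketch: the typical round trip is

     aff_tree c;
     tree_to_aff_combination (expr, type, &c);
     ... manipulate C with the routines above ...
     tree folded = aff_combination_to_tree (&c);

   For a negative constant offset this produces X - CST rather than
   X + (-CST), as handled above.  */
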
/* Copies the tree elements of COMB to ensure that they are not shared.  */

void
unshare_aff_combination (aff_tree *comb)
{
  unsigned i;

  for (i = 0; i < comb->n; i++)
    comb->elts[i].val = unshare_expr (comb->elts[i].val);
  if (comb->rest)
    comb->rest = unshare_expr (comb->rest);
}

/* Removes the M-th element from COMB.  */

void
aff_combination_remove_elt (aff_tree *comb, unsigned m)
{
  comb->n--;
  if (m <= comb->n)
    comb->elts[m] = comb->elts[comb->n];
  if (comb->rest)
    {
      comb->elts[comb->n].coef = 1;
      comb->elts[comb->n].val = comb->rest;
      comb->rest = NULL_TREE;
      comb->n++;
    }
}

/* Adds C * COEF * VAL to R.  VAL may be NULL, in that case only
   C * COEF is added to R.  */

static void
aff_combination_add_product (aff_tree *c, const widest_int &coef, tree val,
			     aff_tree *r)
{
  unsigned i;
  tree aval, type;

  for (i = 0; i < c->n; i++)
    {
      aval = c->elts[i].val;
      if (val)
	{
	  type = TREE_TYPE (aval);
	  aval = fold_build2 (MULT_EXPR, type, aval,
			      fold_convert (type, val));
	}

      aff_combination_add_elt (r, aval, coef * c->elts[i].coef);
    }

  if (c->rest)
    {
      aval = c->rest;
      if (val)
	{
	  type = TREE_TYPE (aval);
	  aval = fold_build2 (MULT_EXPR, type, aval,
			      fold_convert (type, val));
	}

      aff_combination_add_elt (r, aval, coef);
    }

  if (val)
    aff_combination_add_elt (r, val, coef * c->offset);
  else
    aff_combination_add_cst (r, coef * c->offset);
}

/* Multiplies C1 by C2, storing the result to R.  */

void
aff_combination_mult (aff_tree *c1, aff_tree *c2, aff_tree *r)
{
  unsigned i;
  gcc_assert (TYPE_PRECISION (c1->type) == TYPE_PRECISION (c2->type));

  aff_combination_zero (r, c1->type);

  for (i = 0; i < c2->n; i++)
    aff_combination_add_product (c1, c2->elts[i].coef, c2->elts[i].val, r);
  if (c2->rest)
    aff_combination_add_product (c1, 1, c2->rest, r);
  aff_combination_add_product (c1, c2->offset, NULL, r);
}

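/* Illustrative sketch: multiplying {2*x + 3} by {y + 1} with
   aff_combination_mult yields {2*x*y + 2*x + 3*y + 3}; the product x*y
   becomes a new element, built by fold_build2 in
   aff_combination_add_product above.  */
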
/* Returns the element of COMB whose value is VAL, or NULL if no such
   element exists.  If IDX is not NULL, it is set to the index of VAL in
   COMB.  */

static struct aff_comb_elt *
aff_combination_find_elt (aff_tree *comb, tree val, unsigned *idx)
{
  unsigned i;

  for (i = 0; i < comb->n; i++)
    if (operand_equal_p (comb->elts[i].val, val, 0))
      {
	if (idx)
	  *idx = i;

	return &comb->elts[i];
      }

  return NULL;
}

/* Element of the cache that maps ssa name NAME to its expanded form
   as an affine expression EXPANSION.  */

struct name_expansion
{
  aff_tree expansion;

  /* True if the expansion for the name is just being generated.  */
  unsigned in_progress : 1;
};

/* Expands SSA names in COMB recursively.  CACHE is used to cache the
   results.  */

void
aff_combination_expand (aff_tree *comb ATTRIBUTE_UNUSED,
			hash_map<tree, name_expansion *> **cache)
{
  unsigned i;
  aff_tree to_add, current, curre;
  tree e, rhs;
  gimple *def;
  widest_int scale;
  struct name_expansion *exp;

  aff_combination_zero (&to_add, comb->type);
  for (i = 0; i < comb->n; i++)
    {
      tree type, name;
      enum tree_code code;

      e = comb->elts[i].val;
      type = TREE_TYPE (e);
      name = e;
      /* Look through some conversions.  */
      if (CONVERT_EXPR_P (e)
	  && (TYPE_PRECISION (type)
	      >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (e, 0)))))
	name = TREE_OPERAND (e, 0);
      if (TREE_CODE (name) != SSA_NAME)
	continue;
      def = SSA_NAME_DEF_STMT (name);
      if (!is_gimple_assign (def) || gimple_assign_lhs (def) != name)
	continue;

      code = gimple_assign_rhs_code (def);
      if (code != SSA_NAME
	  && !IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
	  && (get_gimple_rhs_class (code) != GIMPLE_SINGLE_RHS
	      || !is_gimple_min_invariant (gimple_assign_rhs1 (def))))
	continue;

      /* We do not know whether the reference retains its value at the
	 place where the expansion is used.  */
      if (TREE_CODE_CLASS (code) == tcc_reference)
	continue;

      if (!*cache)
	*cache = new hash_map<tree, name_expansion *>;
      name_expansion **slot = &(*cache)->get_or_insert (e);
      exp = *slot;

      if (!exp)
	{
	  exp = XNEW (struct name_expansion);
	  exp->in_progress = 1;
	  *slot = exp;
	  /* In principle this is a generally valid folding, but
	     it is not unconditionally an optimization, so do it
	     here and not in fold_unary.  */
	  /* Convert (T1)(X *+- CST) into (T1)X *+- (T1)CST if T1 is wider
	     than the type of X and overflow for the type of X is
	     undefined.  */
	  if (e != name
	      && INTEGRAL_TYPE_P (type)
	      && INTEGRAL_TYPE_P (TREE_TYPE (name))
	      && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (name))
	      && (code == PLUS_EXPR || code == MINUS_EXPR || code == MULT_EXPR)
	      && TREE_CODE (gimple_assign_rhs2 (def)) == INTEGER_CST)
	    rhs = fold_build2 (code, type,
			       fold_convert (type, gimple_assign_rhs1 (def)),
			       fold_convert (type, gimple_assign_rhs2 (def)));
	  else
	    {
	      rhs = gimple_assign_rhs_to_tree (def);
	      if (e != name)
		rhs = fold_convert (type, rhs);
	    }
	  tree_to_aff_combination_expand (rhs, comb->type, &current, cache);
	  exp->expansion = current;
	  exp->in_progress = 0;
	}
      else
	{
	  /* Since we follow the definitions in the SSA form, we should not
	     enter a cycle unless we pass through a phi node.  */
	  gcc_assert (!exp->in_progress);
	  current = exp->expansion;
	}

      /* Accumulate the new terms to TO_ADD, so that we do not modify
	 COMB while traversing it; include the term -coef * E, to remove
	 it from COMB.  */
      scale = comb->elts[i].coef;
      aff_combination_zero (&curre, comb->type);
      aff_combination_add_elt (&curre, e, -scale);
      aff_combination_scale (&current, scale);
      aff_combination_add (&to_add, &current);
      aff_combination_add (&to_add, &curre);
    }
  aff_combination_add (comb, &to_add);
}

/* Similar to tree_to_aff_combination, but follows SSA name definitions
   and expands them recursively.  CACHE is used to cache the expansions
   of the ssa names, to avoid exponential time complexity for cases
   like long chains of assignments in which each name is used several
   times.  */

void
tree_to_aff_combination_expand (tree expr, tree type, aff_tree *comb,
				hash_map<tree, name_expansion *> **cache)
{
  tree_to_aff_combination (expr, type, comb);
  aff_combination_expand (comb, cache);
}

/* Frees memory occupied by struct name_expansion in *VALUE.  Callback for
   hash_map::traverse.  */

static bool
free_name_expansion (tree const &, name_expansion **value, void *)
{
  free (*value);
  return true;
}

/* Frees memory allocated for the CACHE used by
   tree_to_aff_combination_expand.  */

void
free_affine_expand_cache (hash_map<tree, name_expansion *> **cache)
{
  if (!*cache)
    return;

  (*cache)->traverse<void *, free_name_expansion> (NULL);
  delete *cache;
  *cache = NULL;
}

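/* Illustrative usage sketch of the cache life cycle in callers such as
   the loop optimizers:

     hash_map<tree, name_expansion *> *cache = NULL;
     aff_tree c;
     tree_to_aff_combination_expand (expr, type, &c, &cache);
     ... further calls sharing CACHE ...
     free_affine_expand_cache (&cache);

   The cache pointer starts out NULL and is allocated lazily by
   aff_combination_expand.  */
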
/* If VAL != CST * DIV for any constant CST, returns false.
   Otherwise, if *MULT_SET is true, additionally compares CST and MULT,
   and if they are different, returns false.  Finally, if neither of these
   two cases occur, true is returned, and CST is stored to MULT and MULT_SET
   is set to true.  */

static bool
wide_int_constant_multiple_p (const widest_int &val, const widest_int &div,
			      bool *mult_set, widest_int *mult)
{
  widest_int cst;

  if (val == 0)
    {
      if (*mult_set && *mult != 0)
	return false;
      *mult_set = true;
      *mult = 0;
      return true;
    }

  if (div == 0)
    return false;

  if (!wi::multiple_of_p (val, div, SIGNED, &cst))
    return false;

  if (*mult_set && *mult != cst)
    return false;

  *mult_set = true;
  *mult = cst;
  return true;
}

/* Returns true if VAL = X * DIV for some constant X.  If this is the case,
   X is stored to MULT.  */

bool
aff_combination_constant_multiple_p (aff_tree *val, aff_tree *div,
				     widest_int *mult)
{
  bool mult_set = false;
  unsigned i;

  if (val->n == 0 && val->offset == 0)
    {
      *mult = 0;
      return true;
    }

  if (val->n != div->n)
    return false;

  if (val->rest || div->rest)
    return false;

  if (!wide_int_constant_multiple_p (val->offset, div->offset,
				     &mult_set, mult))
    return false;

  for (i = 0; i < div->n; i++)
    {
      struct aff_comb_elt *elt
	      = aff_combination_find_elt (val, div->elts[i].val, NULL);
      if (!elt)
	return false;

      if (!wide_int_constant_multiple_p (elt->coef, div->elts[i].coef,
					 &mult_set, mult))
	return false;
    }

  gcc_assert (mult_set);
  return true;
}

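/* Illustrative sketch: for VAL = {8*x + 4} and DIV = {2*x + 1} the
   function stores 4 to MULT and returns true; for VAL = {8*x + 6} and
   the same DIV it returns false, since the offsets and the coefficients
   do not agree on a common multiplier.  */
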
/* Prints the affine VAL to the FILE.  */

static void
print_aff (FILE *file, aff_tree *val)
{
  unsigned i;
  signop sgn = TYPE_SIGN (val->type);
  if (POINTER_TYPE_P (val->type))
    sgn = SIGNED;
  fprintf (file, "{\n  type = ");
  print_generic_expr (file, val->type, TDF_VOPS|TDF_MEMSYMS);
  fprintf (file, "\n  offset = ");
  print_dec (val->offset, file, sgn);
  if (val->n > 0)
    {
      fprintf (file, "\n  elements = {\n");
      for (i = 0; i < val->n; i++)
	{
	  fprintf (file, "    [%d] = ", i);
	  print_generic_expr (file, val->elts[i].val, TDF_VOPS|TDF_MEMSYMS);

	  fprintf (file, " * ");
	  print_dec (val->elts[i].coef, file, sgn);
	  if (i != val->n - 1)
	    fprintf (file, ", \n");
	}
      fprintf (file, "\n  }");
    }
  if (val->rest)
    {
      fprintf (file, "\n  rest = ");
      print_generic_expr (file, val->rest, TDF_VOPS|TDF_MEMSYMS);
    }
  fprintf (file, "\n}");
}

/* Prints the affine VAL to the standard error, used for debugging.  */

DEBUG_FUNCTION void
debug_aff (aff_tree *val)
{
  print_aff (stderr, val);
  fprintf (stderr, "\n");
}

/* Computes address of the reference REF in ADDR.  The size of the accessed
   location is stored to SIZE.  Returns the ultimate containing object to
   which REF refers.  */

tree
get_inner_reference_aff (tree ref, aff_tree *addr, widest_int *size)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree toff;
  machine_mode mode;
  int uns, rev, vol;
  aff_tree tmp;
  tree base = get_inner_reference (ref, &bitsize, &bitpos, &toff, &mode,
				   &uns, &rev, &vol, false);
  tree base_addr = build_fold_addr_expr (base);

  /* ADDR = &BASE + TOFF + BITPOS / BITS_PER_UNIT.  */

  tree_to_aff_combination (base_addr, sizetype, addr);

  if (toff)
    {
      tree_to_aff_combination (toff, sizetype, &tmp);
      aff_combination_add (addr, &tmp);
    }

  aff_combination_const (&tmp, sizetype, bitpos / BITS_PER_UNIT);
  aff_combination_add (addr, &tmp);

  *size = (bitsize + BITS_PER_UNIT - 1) / BITS_PER_UNIT;

  return base;
}

/* Returns true if a region of size SIZE1 at position 0 and a region of
   size SIZE2 at position DIFF cannot overlap.  */

bool
aff_comb_cannot_overlap_p (aff_tree *diff, const widest_int &size1,
			   const widest_int &size2)
{
  /* Unless the difference is a constant, we fail.  */
  if (diff->n != 0)
    return false;

  if (wi::neg_p (diff->offset))
    {
      /* The second object is before the first one, we succeed if the last
	 element of the second object is before the start of the first one.  */
      return wi::neg_p (diff->offset + size2 - 1);
    }
  else
    {
      /* We succeed if the second object starts after the first one ends.  */
      return wi::les_p (size1, diff->offset);
    }
}

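/* Illustrative sketch: with DIFF = {offset 16, no elements}, SIZE1 = 16
   and SIZE2 = 8 the regions are [0, 16) and [16, 24), so the function
   returns true; with DIFF->offset = 8 it returns false, because the
   second region starts inside the first.  */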