/* Operations with affine combinations of trees.
   Copyright (C) 2005-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "expr.h"
#include "tree-pretty-print.h"
#include "tree-affine.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimplify.h"
#include "flags.h"
#include "dumpfile.h"
#include "cfgexpand.h"
#include "wide-int-print.h"

/* Extends CST as appropriate for the affine combinations COMB.  */

widest_int
wide_int_ext_for_comb (const widest_int &cst, aff_tree *comb)
{
  return wi::sext (cst, TYPE_PRECISION (comb->type));
}

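/* For illustration: with an 8-bit COMB->type, the constant 255 sign-extends
   to -1, i.e. wide_int_ext_for_comb (255, comb) == -1.  Offsets and
   coefficients are thus kept reduced modulo 2^precision and interpreted
   as signed.  */
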
/* Initializes affine combination COMB so that its value is zero in TYPE.  */

static void
aff_combination_zero (aff_tree *comb, tree type)
{
  int i;

  comb->type = type;
  comb->offset = 0;
  comb->n = 0;
  for (i = 0; i < MAX_AFF_ELTS; i++)
    comb->elts[i].coef = 0;
  comb->rest = NULL_TREE;
}

/* Sets COMB to CST.  */

void
aff_combination_const (aff_tree *comb, tree type, const widest_int &cst)
{
  aff_combination_zero (comb, type);
  comb->offset = wide_int_ext_for_comb (cst, comb);
}

/* Sets COMB to single element ELT.  */

void
aff_combination_elt (aff_tree *comb, tree type, tree elt)
{
  aff_combination_zero (comb, type);

  comb->n = 1;
  comb->elts[0].val = elt;
  comb->elts[0].coef = 1;
}

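/* Illustration of the representation: the value 2*x + 3 is described by a
   combination with n == 1, elts[0].val == x, elts[0].coef == 2 and
   offset == 3.  For some tree X of TYPE it can be built as

     aff_combination_elt (&comb, type, x);
     aff_combination_scale (&comb, 2);
     aff_combination_add_cst (&comb, 3);

   using the scaling and constant-addition operations defined below.  */
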
/* Scales COMB by SCALE.  */

void
aff_combination_scale (aff_tree *comb, const widest_int &scale_in)
{
  unsigned i, j;

  widest_int scale = wide_int_ext_for_comb (scale_in, comb);
  if (scale == 1)
    return;

  if (scale == 0)
    {
      aff_combination_zero (comb, comb->type);
      return;
    }

  comb->offset = wide_int_ext_for_comb (scale * comb->offset, comb);
  for (i = 0, j = 0; i < comb->n; i++)
    {
      widest_int new_coef
        = wide_int_ext_for_comb (scale * comb->elts[i].coef, comb);
      /* A coefficient may become zero due to overflow.  Remove the zero
         elements.  */
      if (new_coef == 0)
        continue;
      comb->elts[j].coef = new_coef;
      comb->elts[j].val = comb->elts[i].val;
      j++;
    }
  comb->n = j;

  if (comb->rest)
    {
      tree type = comb->type;
      if (POINTER_TYPE_P (type))
        type = sizetype;
      if (comb->n < MAX_AFF_ELTS)
        {
          comb->elts[comb->n].coef = scale;
          comb->elts[comb->n].val = comb->rest;
          comb->rest = NULL_TREE;
          comb->n++;
        }
      else
        comb->rest = fold_build2 (MULT_EXPR, type, comb->rest,
                                  wide_int_to_tree (type, scale));
    }
}

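/* Note that the overflow check above is not theoretical: with an 8-bit
   combination type, scaling the element {x, 64} by 4 gives the coefficient
   256, which sign-extends to 0, so the element is dropped.  */
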
/* Adds ELT * SCALE to COMB.  */

void
aff_combination_add_elt (aff_tree *comb, tree elt, const widest_int &scale_in)
{
  unsigned i;
  tree type;

  widest_int scale = wide_int_ext_for_comb (scale_in, comb);
  if (scale == 0)
    return;

  for (i = 0; i < comb->n; i++)
    if (operand_equal_p (comb->elts[i].val, elt, 0))
      {
        widest_int new_coef
          = wide_int_ext_for_comb (comb->elts[i].coef + scale, comb);
        if (new_coef != 0)
          {
            comb->elts[i].coef = new_coef;
            return;
          }

        comb->n--;
        comb->elts[i] = comb->elts[comb->n];

        if (comb->rest)
          {
            gcc_assert (comb->n == MAX_AFF_ELTS - 1);
            comb->elts[comb->n].coef = 1;
            comb->elts[comb->n].val = comb->rest;
            comb->rest = NULL_TREE;
            comb->n++;
          }
        return;
      }
  if (comb->n < MAX_AFF_ELTS)
    {
      comb->elts[comb->n].coef = scale;
      comb->elts[comb->n].val = elt;
      comb->n++;
      return;
    }

  type = comb->type;
  if (POINTER_TYPE_P (type))
    type = sizetype;

  if (scale == 1)
    elt = fold_convert (type, elt);
  else
    elt = fold_build2 (MULT_EXPR, type,
                       fold_convert (type, elt),
                       wide_int_to_tree (type, scale));

  if (comb->rest)
    comb->rest = fold_build2 (PLUS_EXPR, type, comb->rest, elt);
  else
    comb->rest = elt;
}

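/* For example, adding {x, -2} to a combination that already contains the
   element {x, 2} makes NEW_COEF zero, so the element is removed and the
   freed slot is reused for COMB->rest if one had been spilled there.  */
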
/* Adds CST to C.  */

static void
aff_combination_add_cst (aff_tree *c, const widest_int &cst)
{
  c->offset = wide_int_ext_for_comb (c->offset + cst, c);
}

/* Adds COMB2 to COMB1.  */

void
aff_combination_add (aff_tree *comb1, aff_tree *comb2)
{
  unsigned i;

  aff_combination_add_cst (comb1, comb2->offset);
  for (i = 0; i < comb2->n; i++)
    aff_combination_add_elt (comb1, comb2->elts[i].val, comb2->elts[i].coef);
  if (comb2->rest)
    aff_combination_add_elt (comb1, comb2->rest, 1);
}

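/* E.g. adding 3*x + 1 to 2*x + 4*y yields 5*x + 4*y + 1: equal elements
   are merged by aff_combination_add_elt and the offsets are summed.  */
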
/* Converts affine combination COMB to TYPE.  */

void
aff_combination_convert (aff_tree *comb, tree type)
{
  unsigned i, j;
  tree comb_type = comb->type;

  if (TYPE_PRECISION (type) > TYPE_PRECISION (comb_type))
    {
      tree val = fold_convert (type, aff_combination_to_tree (comb));
      tree_to_aff_combination (val, type, comb);
      return;
    }

  comb->type = type;
  if (comb->rest && !POINTER_TYPE_P (type))
    comb->rest = fold_convert (type, comb->rest);

  if (TYPE_PRECISION (type) == TYPE_PRECISION (comb_type))
    return;

  comb->offset = wide_int_ext_for_comb (comb->offset, comb);
  for (i = j = 0; i < comb->n; i++)
    {
      if (comb->elts[i].coef == 0)
        continue;
      comb->elts[j].coef = comb->elts[i].coef;
      comb->elts[j].val = fold_convert (type, comb->elts[i].val);
      j++;
    }

  comb->n = j;
  if (comb->n < MAX_AFF_ELTS && comb->rest)
    {
      comb->elts[comb->n].coef = 1;
      comb->elts[comb->n].val = comb->rest;
      comb->rest = NULL_TREE;
      comb->n++;
    }
}

/* Splits EXPR into an affine combination of parts.  */

void
tree_to_aff_combination (tree expr, tree type, aff_tree *comb)
{
  aff_tree tmp;
  enum tree_code code;
  tree cst, core, toffset;
  HOST_WIDE_INT bitpos, bitsize;
  enum machine_mode mode;
  int unsignedp, volatilep;

  STRIP_NOPS (expr);

  code = TREE_CODE (expr);
  switch (code)
    {
    case INTEGER_CST:
      aff_combination_const (comb, type, wi::to_widest (expr));
      return;

    case POINTER_PLUS_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
      aff_combination_add (comb, &tmp);
      return;

    case PLUS_EXPR:
    case MINUS_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), type, &tmp);
      if (code == MINUS_EXPR)
        aff_combination_scale (&tmp, -1);
      aff_combination_add (comb, &tmp);
      return;

    case MULT_EXPR:
      cst = TREE_OPERAND (expr, 1);
      if (TREE_CODE (cst) != INTEGER_CST)
        break;
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, wi::to_widest (cst));
      return;

    case NEGATE_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, -1);
      return;

    case BIT_NOT_EXPR:
      /* ~x = -x - 1 */
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, -1);
      aff_combination_add_cst (comb, -1);
      return;

    case ADDR_EXPR:
      /* Handle &MEM[ptr + CST] which is equivalent to POINTER_PLUS_EXPR.  */
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == MEM_REF)
        {
          expr = TREE_OPERAND (expr, 0);
          tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
          tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
          aff_combination_add (comb, &tmp);
          return;
        }
      core = get_inner_reference (TREE_OPERAND (expr, 0), &bitsize, &bitpos,
                                  &toffset, &mode, &unsignedp, &volatilep,
                                  false);
      if (bitpos % BITS_PER_UNIT != 0)
        break;
      aff_combination_const (comb, type, bitpos / BITS_PER_UNIT);
      if (TREE_CODE (core) == MEM_REF)
        {
          aff_combination_add_cst (comb, wi::to_widest (TREE_OPERAND (core, 1)));
          core = TREE_OPERAND (core, 0);
        }
      else
        core = build_fold_addr_expr (core);

      if (TREE_CODE (core) == ADDR_EXPR)
        aff_combination_add_elt (comb, core, 1);
      else
        {
          tree_to_aff_combination (core, type, &tmp);
          aff_combination_add (comb, &tmp);
        }
      if (toffset)
        {
          tree_to_aff_combination (toffset, type, &tmp);
          aff_combination_add (comb, &tmp);
        }
      return;

    case MEM_REF:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR)
        tree_to_aff_combination (TREE_OPERAND (TREE_OPERAND (expr, 0), 0),
                                 type, comb);
      else if (integer_zerop (TREE_OPERAND (expr, 1)))
        {
          aff_combination_elt (comb, type, expr);
          return;
        }
      else
        aff_combination_elt (comb, type,
                             build2 (MEM_REF, TREE_TYPE (expr),
                                     TREE_OPERAND (expr, 0),
                                     build_int_cst
                                       (TREE_TYPE (TREE_OPERAND (expr, 1)), 0)));
      tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
      aff_combination_add (comb, &tmp);
      return;

    default:
      break;
    }

  aff_combination_elt (comb, type, expr);
}

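/* For instance, (x + 3) * 2 - y decomposes into the combination
   2*x - 1*y + 6, while a non-affine subexpression such as x * y is kept
   whole as a single element with coefficient 1.  */
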
/* Creates EXPR + ELT * SCALE in TYPE.  EXPR is taken from affine
   combination COMB.  */

static tree
add_elt_to_tree (tree expr, tree type, tree elt, const widest_int &scale_in,
                 aff_tree *comb ATTRIBUTE_UNUSED)
{
  enum tree_code code;
  tree type1 = type;
  if (POINTER_TYPE_P (type))
    type1 = sizetype;

  widest_int scale = wide_int_ext_for_comb (scale_in, comb);

  if (scale == -1
      && POINTER_TYPE_P (TREE_TYPE (elt)))
    {
      elt = convert_to_ptrofftype (elt);
      elt = fold_build1 (NEGATE_EXPR, TREE_TYPE (elt), elt);
      scale = 1;
    }

  if (scale == 1)
    {
      if (!expr)
        {
          if (POINTER_TYPE_P (TREE_TYPE (elt)))
            return elt;
          else
            return fold_convert (type1, elt);
        }

      if (POINTER_TYPE_P (TREE_TYPE (expr)))
        return fold_build_pointer_plus (expr, elt);
      if (POINTER_TYPE_P (TREE_TYPE (elt)))
        return fold_build_pointer_plus (elt, expr);
      return fold_build2 (PLUS_EXPR, type1,
                          expr, fold_convert (type1, elt));
    }

  if (scale == -1)
    {
      if (!expr)
        return fold_build1 (NEGATE_EXPR, type1,
                            fold_convert (type1, elt));

      if (POINTER_TYPE_P (TREE_TYPE (expr)))
        {
          elt = convert_to_ptrofftype (elt);
          elt = fold_build1 (NEGATE_EXPR, TREE_TYPE (elt), elt);
          return fold_build_pointer_plus (expr, elt);
        }
      return fold_build2 (MINUS_EXPR, type1,
                          expr, fold_convert (type1, elt));
    }

  elt = fold_convert (type1, elt);
  if (!expr)
    return fold_build2 (MULT_EXPR, type1, elt,
                        wide_int_to_tree (type1, scale));

  if (wi::neg_p (scale))
    {
      code = MINUS_EXPR;
      scale = -scale;
    }
  else
    code = PLUS_EXPR;

  elt = fold_build2 (MULT_EXPR, type1, elt,
                     wide_int_to_tree (type1, scale));
  if (POINTER_TYPE_P (TREE_TYPE (expr)))
    {
      if (code == MINUS_EXPR)
        elt = fold_build1 (NEGATE_EXPR, type1, elt);
      return fold_build_pointer_plus (expr, elt);
    }
  return fold_build2 (code, type1, expr, elt);
}

/* Makes tree from the affine combination COMB.  */

tree
aff_combination_to_tree (aff_tree *comb)
{
  tree type = comb->type;
  tree expr = NULL_TREE;
  unsigned i;
  widest_int off, sgn;
  tree type1 = type;
  if (POINTER_TYPE_P (type))
    type1 = sizetype;

  gcc_assert (comb->n == MAX_AFF_ELTS || comb->rest == NULL_TREE);

  for (i = 0; i < comb->n; i++)
    expr = add_elt_to_tree (expr, type, comb->elts[i].val, comb->elts[i].coef,
                            comb);

  if (comb->rest)
    expr = add_elt_to_tree (expr, type, comb->rest, 1, comb);

  /* Ensure that we get x - 1, not x + (-1) or x + 0xff..f if x is
     unsigned.  */
  if (wi::neg_p (comb->offset))
    {
      off = -comb->offset;
      sgn = -1;
    }
  else
    {
      off = comb->offset;
      sgn = 1;
    }
  return add_elt_to_tree (expr, type, wide_int_to_tree (type1, off), sgn,
                          comb);
}

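/* This is the inverse of tree_to_aff_combination up to folding; e.g. the
   combination 2*x + 6 is rebuilt as x * 2 + 6.  The offset is added last,
   so a negative offset is emitted as a subtraction even for unsigned
   types.  */
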
/* Copies the tree elements of COMB to ensure that they are not shared.  */

void
unshare_aff_combination (aff_tree *comb)
{
  unsigned i;

  for (i = 0; i < comb->n; i++)
    comb->elts[i].val = unshare_expr (comb->elts[i].val);
  if (comb->rest)
    comb->rest = unshare_expr (comb->rest);
}

/* Remove M-th element from COMB.  */

void
aff_combination_remove_elt (aff_tree *comb, unsigned m)
{
  comb->n--;
  if (m <= comb->n)
    comb->elts[m] = comb->elts[comb->n];
  if (comb->rest)
    {
      comb->elts[comb->n].coef = 1;
      comb->elts[comb->n].val = comb->rest;
      comb->rest = NULL_TREE;
      comb->n++;
    }
}

/* Adds C * COEF * VAL to R.  VAL may be NULL, in that case only
   C * COEF is added to R.  */

static void
aff_combination_add_product (aff_tree *c, const widest_int &coef, tree val,
                             aff_tree *r)
{
  unsigned i;
  tree aval, type;

  for (i = 0; i < c->n; i++)
    {
      aval = c->elts[i].val;
      if (val)
        {
          type = TREE_TYPE (aval);
          aval = fold_build2 (MULT_EXPR, type, aval,
                              fold_convert (type, val));
        }

      aff_combination_add_elt (r, aval, coef * c->elts[i].coef);
    }

  if (c->rest)
    {
      aval = c->rest;
      if (val)
        {
          type = TREE_TYPE (aval);
          aval = fold_build2 (MULT_EXPR, type, aval,
                              fold_convert (type, val));
        }

      aff_combination_add_elt (r, aval, coef);
    }

  if (val)
    aff_combination_add_elt (r, val, coef * c->offset);
  else
    aff_combination_add_cst (r, coef * c->offset);
}

/* Multiplies C1 by C2, storing the result to R.  */

void
aff_combination_mult (aff_tree *c1, aff_tree *c2, aff_tree *r)
{
  unsigned i;

  gcc_assert (TYPE_PRECISION (c1->type) == TYPE_PRECISION (c2->type));

  aff_combination_zero (r, c1->type);

  for (i = 0; i < c2->n; i++)
    aff_combination_add_product (c1, c2->elts[i].coef, c2->elts[i].val, r);
  if (c2->rest)
    aff_combination_add_product (c1, 1, c2->rest, r);
  aff_combination_add_product (c1, c2->offset, NULL, r);
}

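/* The multiplication distributes term by term; e.g. (x + 1) * (y + 2)
   becomes 1*(x*y) + 2*x + 1*y + 2, where the non-affine product x*y is
   entered as a new element of R.  */
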
/* Returns the element of COMB whose value is VAL, or NULL if no such
   element exists.  If IDX is not NULL, it is set to the index of VAL in
   COMB.  */

static struct aff_comb_elt *
aff_combination_find_elt (aff_tree *comb, tree val, unsigned *idx)
{
  unsigned i;

  for (i = 0; i < comb->n; i++)
    if (operand_equal_p (comb->elts[i].val, val, 0))
      {
        if (idx)
          *idx = i;

        return &comb->elts[i];
      }

  return NULL;
}

/* Element of the cache that maps ssa name NAME to its expanded form
   as an affine expression EXPANSION.  */

struct name_expansion
{
  aff_tree expansion;

  /* True if the expansion for the name is just being generated.  */
  unsigned in_progress : 1;
};

/* Expands SSA names in COMB recursively.  CACHE is used to cache the
   results.  */

void
aff_combination_expand (aff_tree *comb ATTRIBUTE_UNUSED,
                        hash_map<tree, name_expansion *> **cache)
{
  unsigned i;
  aff_tree to_add, current, curre;
  tree e, rhs;
  gimple def;
  widest_int scale;
  struct name_expansion *exp;

  aff_combination_zero (&to_add, comb->type);
  for (i = 0; i < comb->n; i++)
    {
      tree type, name;
      enum tree_code code;

      e = comb->elts[i].val;
      type = TREE_TYPE (e);
      name = e;
      /* Look through some conversions.  */
      if (TREE_CODE (e) == NOP_EXPR
          && (TYPE_PRECISION (type)
              >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (e, 0)))))
        name = TREE_OPERAND (e, 0);
      if (TREE_CODE (name) != SSA_NAME)
        continue;
      def = SSA_NAME_DEF_STMT (name);
      if (!is_gimple_assign (def) || gimple_assign_lhs (def) != name)
        continue;

      code = gimple_assign_rhs_code (def);
      if (code != SSA_NAME
          && !IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
          && (get_gimple_rhs_class (code) != GIMPLE_SINGLE_RHS
              || !is_gimple_min_invariant (gimple_assign_rhs1 (def))))
        continue;

      /* We do not know whether the reference retains its value at the
         place where the expansion is used.  */
      if (TREE_CODE_CLASS (code) == tcc_reference)
        continue;

      if (!*cache)
        *cache = new hash_map<tree, name_expansion *>;
      name_expansion **slot = &(*cache)->get_or_insert (e);
      exp = *slot;

      if (!exp)
        {
          exp = XNEW (struct name_expansion);
          exp->in_progress = 1;
          *slot = exp;
          /* In principle this is a generally valid folding, but
             it is not unconditionally an optimization, so do it
             here and not in fold_unary.  */
          /* Convert (T1)(X *+- CST) into (T1)X *+- (T1)CST if T1 is wider
             than the type of X and overflow for the type of X is
             undefined.  */
          if (e != name
              && INTEGRAL_TYPE_P (type)
              && INTEGRAL_TYPE_P (TREE_TYPE (name))
              && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (name))
              && TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (name))
              && (code == PLUS_EXPR || code == MINUS_EXPR || code == MULT_EXPR)
              && TREE_CODE (gimple_assign_rhs2 (def)) == INTEGER_CST)
            rhs = fold_build2 (code, type,
                               fold_convert (type, gimple_assign_rhs1 (def)),
                               fold_convert (type, gimple_assign_rhs2 (def)));
          else
            {
              rhs = gimple_assign_rhs_to_tree (def);
              if (e != name)
                rhs = fold_convert (type, rhs);
            }
          tree_to_aff_combination_expand (rhs, comb->type, &current, cache);
          exp->expansion = current;
          exp->in_progress = 0;
        }
      else
        {
          /* Since we follow the definitions in the SSA form, we should not
             enter a cycle unless we pass through a phi node.  */
          gcc_assert (!exp->in_progress);
          current = exp->expansion;
        }

      /* Accumulate the new terms to TO_ADD, so that we do not modify
         COMB while traversing it; include the term -coef * E, to remove
         it from COMB.  */
      scale = comb->elts[i].coef;
      aff_combination_zero (&curre, comb->type);
      aff_combination_add_elt (&curre, e, -scale);
      aff_combination_scale (&current, scale);
      aff_combination_add (&to_add, &current);
      aff_combination_add (&to_add, &curre);
    }
  aff_combination_add (comb, &to_add);
}

/* Similar to tree_to_aff_combination, but follows SSA name definitions
   and expands them recursively.  CACHE is used to cache the expansions
   of the ssa names, to avoid exponential time complexity for cases
   like

   x1 = a + b;
   x2 = x1 + x1;
   x3 = x2 + x2;
   ...  */

void
tree_to_aff_combination_expand (tree expr, tree type, aff_tree *comb,
                                hash_map<tree, name_expansion *> **cache)
{
  tree_to_aff_combination (expr, type, comb);
  aff_combination_expand (comb, cache);
}

/* Frees memory occupied by struct name_expansion in *VALUE.  Callback for
   hash_map::traverse.  */

bool
free_name_expansion (tree const &, name_expansion **value, void *)
{
  free (*value);
  return true;
}

/* Frees memory allocated for the CACHE used by
   tree_to_aff_combination_expand.  */

void
free_affine_expand_cache (hash_map<tree, name_expansion *> **cache)
{
  if (!*cache)
    return;

  (*cache)->traverse<void *, free_name_expansion> (NULL);
  delete *cache;
  *cache = NULL;
}

/* If VAL != CST * DIV for any constant CST, returns false.
   Otherwise, if *MULT_SET is true, additionally compares CST and MULT,
   and if they are different, returns false.  Finally, if neither of these
   two cases occur, true is returned, and CST is stored to MULT and MULT_SET
   is set to true.  */

static bool
wide_int_constant_multiple_p (const widest_int &val, const widest_int &div,
                              bool *mult_set, widest_int *mult)
{
  widest_int cst;

  if (val == 0)
    {
      if (*mult_set && *mult != 0)
        return false;
      *mult_set = true;
      *mult = 0;
      return true;
    }

  if (div == 0)
    return false;

  if (!wi::multiple_of_p (val, div, SIGNED, &cst))
    return false;

  if (*mult_set && *mult != cst)
    return false;

  *mult_set = true;
  *mult = cst;
  return true;
}

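/* For instance, with DIV == 4, a first call with VAL == 12 records
   *MULT == 3; a subsequent call with VAL == 8 then fails, since
   8 == 2 * 4 contradicts the recorded multiplier.  */
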
/* Returns true if VAL = X * DIV for some constant X.  If this is the case,
   X is stored to MULT.  */

bool
aff_combination_constant_multiple_p (aff_tree *val, aff_tree *div,
                                     widest_int *mult)
{
  bool mult_set = false;
  unsigned i;

  if (val->n == 0 && val->offset == 0)
    {
      *mult = 0;
      return true;
    }

  if (val->n != div->n)
    return false;

  if (val->rest || div->rest)
    return false;

  if (!wide_int_constant_multiple_p (val->offset, div->offset,
                                     &mult_set, mult))
    return false;

  for (i = 0; i < div->n; i++)
    {
      struct aff_comb_elt *elt
        = aff_combination_find_elt (val, div->elts[i].val, NULL);
      if (!elt)
        return false;
      if (!wide_int_constant_multiple_p (elt->coef, div->elts[i].coef,
                                         &mult_set, mult))
        return false;
    }

  gcc_assert (mult_set);
  return true;
}

/* Prints the affine VAL to the FILE.  */

static void
print_aff (FILE *file, aff_tree *val)
{
  unsigned i;
  signop sgn = TYPE_SIGN (val->type);
  if (POINTER_TYPE_P (val->type))
    sgn = SIGNED;
  fprintf (file, "{\n  type = ");
  print_generic_expr (file, val->type, TDF_VOPS|TDF_MEMSYMS);
  fprintf (file, "\n  offset = ");
  print_dec (val->offset, file, sgn);
  if (val->n > 0)
    {
      fprintf (file, "\n  elements = {\n");
      for (i = 0; i < val->n; i++)
        {
          fprintf (file, "    [%d] = ", i);
          print_generic_expr (file, val->elts[i].val, TDF_VOPS|TDF_MEMSYMS);

          fprintf (file, " * ");
          print_dec (val->elts[i].coef, file, sgn);
          if (i != val->n - 1)
            fprintf (file, ", \n");
        }
      fprintf (file, "\n  }");
    }
  if (val->rest)
    {
      fprintf (file, "\n  rest = ");
      print_generic_expr (file, val->rest, TDF_VOPS|TDF_MEMSYMS);
    }
  fprintf (file, "\n}");
}

/* Prints the affine VAL to the standard error, used for debugging.  */

DEBUG_FUNCTION void
debug_aff (aff_tree *val)
{
  print_aff (stderr, val);
  fprintf (stderr, "\n");
}

/* Computes address of the reference REF in ADDR.  The size of the accessed
   location is stored to SIZE.  Returns the ultimate containing object to
   which REF refers.  */

tree
get_inner_reference_aff (tree ref, aff_tree *addr, widest_int *size)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree toff;
  enum machine_mode mode;
  int uns, vol;
  aff_tree tmp;
  tree base = get_inner_reference (ref, &bitsize, &bitpos, &toff, &mode,
                                   &uns, &vol, false);
  tree base_addr = build_fold_addr_expr (base);

  /* ADDR = &BASE + TOFF + BITPOS / BITS_PER_UNIT.  */

  tree_to_aff_combination (base_addr, sizetype, addr);

  if (toff)
    {
      tree_to_aff_combination (toff, sizetype, &tmp);
      aff_combination_add (addr, &tmp);
    }

  aff_combination_const (&tmp, sizetype, bitpos / BITS_PER_UNIT);
  aff_combination_add (addr, &tmp);

  *size = (bitsize + BITS_PER_UNIT - 1) / BITS_PER_UNIT;

  return base;
}

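/* The size is rounded up to whole bytes; e.g. a 17-bit bit-field access
   gives *SIZE == 3.  */
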
/* Returns true if a region of size SIZE1 at position 0 and a region of
   size SIZE2 at position DIFF cannot overlap.  */

bool
aff_comb_cannot_overlap_p (aff_tree *diff, const widest_int &size1,
                           const widest_int &size2)
{
  /* Unless the difference is a constant, we fail.  */
  if (diff->n != 0)
    return false;

  if (wi::neg_p (diff->offset))
    {
      /* The second object is before the first one, we succeed if the last
         element of the second object is before the start of the first one.  */
      return wi::neg_p (diff->offset + size2 - 1);
    }
  else
    {
      /* We succeed if the second object starts after the first one ends.  */
      return wi::les_p (size1, diff->offset);
    }
}

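/* For example, with SIZE1 == 4 and SIZE2 == 4, a constant difference of
   -4 or 4 proves the regions disjoint ([-4, -1] vs. [0, 3], resp.
   [0, 3] vs. [4, 7]), whereas a difference of 2 does not.  */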