/* Operations with affine combinations of trees.
   Copyright (C) 2005-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "fold-const.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "tree-pretty-print.h"
#include "tree-affine.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "gimple.h"
#include "cfgexpand.h"
#include "wide-int-print.h"
/* Extends CST as appropriate for the affine combinations COMB.  */

static widest_int
wide_int_ext_for_comb (const widest_int &cst, aff_tree *comb)
{
  return wi::sext (cst, TYPE_PRECISION (comb->type));
}
/* Initializes affine combination COMB so that its value is zero in TYPE.  */

static void
aff_combination_zero (aff_tree *comb, tree type)
{
  int i;

  comb->type = type;
  comb->offset = 0;
  comb->n = 0;
  for (i = 0; i < MAX_AFF_ELTS; i++)
    comb->elts[i].coef = 0;
  comb->rest = NULL_TREE;
}
/* Sets COMB to CST.  */

void
aff_combination_const (aff_tree *comb, tree type, const widest_int &cst)
{
  aff_combination_zero (comb, type);
  comb->offset = wide_int_ext_for_comb (cst, comb);
}
/* Sets COMB to single element ELT.  */

void
aff_combination_elt (aff_tree *comb, tree type, tree elt)
{
  aff_combination_zero (comb, type);

  comb->n = 1;
  comb->elts[0].val = elt;
  comb->elts[0].coef = 1;
}
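/* For instance, the value a + 4*b + 7 (with a and b standing for arbitrary
   trees) is represented with offset = 7, n = 2 and
   elts = { {a, 1}, {b, 4} }; terms that do not fit into the MAX_AFF_ELTS
   array spill over into REST, which is added in with coefficient 1.  */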
/* Scales COMB by SCALE.  */

void
aff_combination_scale (aff_tree *comb, const widest_int &scale_in)
{
  unsigned i, j;

  widest_int scale = wide_int_ext_for_comb (scale_in, comb);
  if (scale == 1)
    return;

  if (scale == 0)
    {
      aff_combination_zero (comb, comb->type);
      return;
    }

  comb->offset = wide_int_ext_for_comb (scale * comb->offset, comb);
  for (i = 0, j = 0; i < comb->n; i++)
    {
      widest_int new_coef
	= wide_int_ext_for_comb (scale * comb->elts[i].coef, comb);
      /* A coefficient may become zero due to overflow.  Remove the zero
	 elements.  */
      if (new_coef == 0)
	continue;
      comb->elts[j].coef = new_coef;
      comb->elts[j].val = comb->elts[i].val;
      j++;
    }
  comb->n = j;

  if (comb->rest)
    {
      tree type = comb->type;
      if (POINTER_TYPE_P (type))
	type = sizetype;
      if (comb->n < MAX_AFF_ELTS)
	{
	  comb->elts[comb->n].coef = scale;
	  comb->elts[comb->n].val = comb->rest;
	  comb->rest = NULL_TREE;
	  comb->n++;
	}
      else
	comb->rest = fold_build2 (MULT_EXPR, type, comb->rest,
				  wide_int_to_tree (type, scale));
    }
}
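/* For instance, scaling a + 4*b + 7 by 3 yields 3*a + 12*b + 21, while
   scaling by 0 resets the combination to zero via the early exit above.  */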
/* Adds ELT * SCALE to COMB.  */

void
aff_combination_add_elt (aff_tree *comb, tree elt, const widest_int &scale_in)
{
  unsigned i;
  tree type;

  widest_int scale = wide_int_ext_for_comb (scale_in, comb);
  if (scale == 0)
    return;

  for (i = 0; i < comb->n; i++)
    if (operand_equal_p (comb->elts[i].val, elt, 0))
      {
	widest_int new_coef
	  = wide_int_ext_for_comb (comb->elts[i].coef + scale, comb);
	if (new_coef != 0)
	  {
	    comb->elts[i].coef = new_coef;
	    return;
	  }

	comb->n--;
	comb->elts[i] = comb->elts[comb->n];

	if (comb->rest)
	  {
	    gcc_assert (comb->n == MAX_AFF_ELTS - 1);
	    comb->elts[comb->n].coef = 1;
	    comb->elts[comb->n].val = comb->rest;
	    comb->rest = NULL_TREE;
	    comb->n++;
	  }
	return;
      }
  if (comb->n < MAX_AFF_ELTS)
    {
      comb->elts[comb->n].coef = scale;
      comb->elts[comb->n].val = elt;
      comb->n++;
      return;
    }

  type = comb->type;
  if (POINTER_TYPE_P (type))
    type = sizetype;

  if (scale == 1)
    elt = fold_convert (type, elt);
  else
    elt = fold_build2 (MULT_EXPR, type,
		       fold_convert (type, elt),
		       wide_int_to_tree (type, scale));

  if (comb->rest)
    comb->rest = fold_build2 (PLUS_EXPR, type, comb->rest, elt);
  else
    comb->rest = elt;
}
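/* For instance, adding b * 4 to a + 2*b merges the coefficients into
   a + 6*b, while adding b * -2 instead cancels the existing term and
   leaves just a.  */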
/* Adds CST to C.  */

static void
aff_combination_add_cst (aff_tree *c, const widest_int &cst)
{
  c->offset = wide_int_ext_for_comb (c->offset + cst, c);
}
/* Adds COMB2 to COMB1.  */

void
aff_combination_add (aff_tree *comb1, aff_tree *comb2)
{
  unsigned i;

  aff_combination_add_cst (comb1, comb2->offset);
  for (i = 0; i < comb2->n; i++)
    aff_combination_add_elt (comb1, comb2->elts[i].val, comb2->elts[i].coef);
  if (comb2->rest)
    aff_combination_add_elt (comb1, comb2->rest, 1);
}
/* Converts affine combination COMB to TYPE.  */

void
aff_combination_convert (aff_tree *comb, tree type)
{
  unsigned i, j;
  tree comb_type = comb->type;

  if (TYPE_PRECISION (type) > TYPE_PRECISION (comb_type))
    {
      tree val = fold_convert (type, aff_combination_to_tree (comb));
      tree_to_aff_combination (val, type, comb);
      return;
    }

  comb->type = type;
  if (comb->rest && !POINTER_TYPE_P (type))
    comb->rest = fold_convert (type, comb->rest);

  if (TYPE_PRECISION (type) == TYPE_PRECISION (comb_type))
    return;

  comb->offset = wide_int_ext_for_comb (comb->offset, comb);
  for (i = j = 0; i < comb->n; i++)
    {
      if (comb->elts[i].coef == 0)
	continue;
      comb->elts[j].coef = comb->elts[i].coef;
      comb->elts[j].val = fold_convert (type, comb->elts[i].val);
      j++;
    }

  comb->n = j;
  if (comb->n < MAX_AFF_ELTS && comb->rest)
    {
      comb->elts[comb->n].coef = 1;
      comb->elts[comb->n].val = comb->rest;
      comb->rest = NULL_TREE;
      comb->n++;
    }
}
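/* Note that widening goes through aff_combination_to_tree and back rather
   than converting in place: the coefficients were only kept to the old
   precision, so a combination that is valid modulo 2^32 cannot in general
   simply be reinterpreted in a 64-bit type.  */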
/* Splits EXPR into an affine combination of parts.  */

void
tree_to_aff_combination (tree expr, tree type, aff_tree *comb)
{
  aff_tree tmp;
  enum tree_code code;
  tree cst, core, toffset;
  HOST_WIDE_INT bitpos, bitsize;
  machine_mode mode;
  int unsignedp, volatilep;

  STRIP_NOPS (expr);

  code = TREE_CODE (expr);
  switch (code)
    {
    case INTEGER_CST:
      aff_combination_const (comb, type, wi::to_widest (expr));
      return;

    case POINTER_PLUS_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
      aff_combination_add (comb, &tmp);
      return;

    case PLUS_EXPR:
    case MINUS_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), type, &tmp);
      if (code == MINUS_EXPR)
	aff_combination_scale (&tmp, -1);
      aff_combination_add (comb, &tmp);
      return;

    case MULT_EXPR:
      cst = TREE_OPERAND (expr, 1);
      if (TREE_CODE (cst) != INTEGER_CST)
	break;
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, wi::to_widest (cst));
      return;

    case NEGATE_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, -1);
      return;

    case BIT_NOT_EXPR:
      /* ~x = -x - 1.  */
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, -1);
      aff_combination_add_cst (comb, -1);
      return;

    case ADDR_EXPR:
      /* Handle &MEM[ptr + CST] which is equivalent to POINTER_PLUS_EXPR.  */
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == MEM_REF)
	{
	  expr = TREE_OPERAND (expr, 0);
	  tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
	  tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
	  aff_combination_add (comb, &tmp);
	  return;
	}
      core = get_inner_reference (TREE_OPERAND (expr, 0), &bitsize, &bitpos,
				  &toffset, &mode, &unsignedp, &volatilep,
				  false);
      if (bitpos % BITS_PER_UNIT != 0)
	break;
      aff_combination_const (comb, type, bitpos / BITS_PER_UNIT);
      if (TREE_CODE (core) == MEM_REF)
	{
	  aff_combination_add_cst (comb,
				   wi::to_widest (TREE_OPERAND (core, 1)));
	  core = TREE_OPERAND (core, 0);
	}
      else
	core = build_fold_addr_expr (core);

      if (TREE_CODE (core) == ADDR_EXPR)
	aff_combination_add_elt (comb, core, 1);
      else
	{
	  tree_to_aff_combination (core, type, &tmp);
	  aff_combination_add (comb, &tmp);
	}
      if (toffset)
	{
	  tree_to_aff_combination (toffset, type, &tmp);
	  aff_combination_add (comb, &tmp);
	}
      return;

    case MEM_REF:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR)
	tree_to_aff_combination (TREE_OPERAND (TREE_OPERAND (expr, 0), 0),
				 type, comb);
      else if (integer_zerop (TREE_OPERAND (expr, 1)))
	{
	  aff_combination_elt (comb, type, expr);
	  return;
	}
      else
	aff_combination_elt (comb, type,
			     build2 (MEM_REF, TREE_TYPE (expr),
				     TREE_OPERAND (expr, 0),
				     build_int_cst
				       (TREE_TYPE (TREE_OPERAND (expr, 1)), 0)));
      tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
      aff_combination_add (comb, &tmp);
      return;

    default:
      break;
    }

  aff_combination_elt (comb, type, expr);
}
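/* For instance, p + (i + 1) * 4 decomposes via the POINTER_PLUS_EXPR,
   MULT_EXPR and PLUS_EXPR cases above into the combination p + 4*i + 4;
   anything the switch cannot peel apart becomes a single opaque element.  */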
/* Creates EXPR + ELT * SCALE in TYPE.  EXPR is taken from affine
   combination COMB.  */

static tree
add_elt_to_tree (tree expr, tree type, tree elt, const widest_int &scale_in,
		 aff_tree *comb ATTRIBUTE_UNUSED)
{
  enum tree_code code;
  tree type1 = type;
  if (POINTER_TYPE_P (type))
    type1 = sizetype;

  widest_int scale = wide_int_ext_for_comb (scale_in, comb);

  if (scale == -1
      && POINTER_TYPE_P (TREE_TYPE (elt)))
    {
      elt = convert_to_ptrofftype (elt);
      elt = fold_build1 (NEGATE_EXPR, TREE_TYPE (elt), elt);
      scale = 1;
    }

  if (scale == 1)
    {
      if (!expr)
	{
	  if (POINTER_TYPE_P (TREE_TYPE (elt)))
	    return elt;
	  else
	    return fold_convert (type1, elt);
	}

      if (POINTER_TYPE_P (TREE_TYPE (expr)))
	return fold_build_pointer_plus (expr, elt);
      if (POINTER_TYPE_P (TREE_TYPE (elt)))
	return fold_build_pointer_plus (elt, expr);
      return fold_build2 (PLUS_EXPR, type1,
			  expr, fold_convert (type1, elt));
    }

  if (scale == -1)
    {
      if (!expr)
	return fold_build1 (NEGATE_EXPR, type1,
			    fold_convert (type1, elt));

      if (POINTER_TYPE_P (TREE_TYPE (expr)))
	{
	  elt = convert_to_ptrofftype (elt);
	  elt = fold_build1 (NEGATE_EXPR, TREE_TYPE (elt), elt);
	  return fold_build_pointer_plus (expr, elt);
	}
      return fold_build2 (MINUS_EXPR, type1,
			  expr, fold_convert (type1, elt));
    }

  elt = fold_convert (type1, elt);
  if (!expr)
    return fold_build2 (MULT_EXPR, type1, elt,
			wide_int_to_tree (type1, scale));

  if (wi::neg_p (scale))
    {
      code = MINUS_EXPR;
      scale = -scale;
    }
  else
    code = PLUS_EXPR;

  elt = fold_build2 (MULT_EXPR, type1, elt,
		     wide_int_to_tree (type1, scale));
  if (POINTER_TYPE_P (TREE_TYPE (expr)))
    {
      if (code == MINUS_EXPR)
	elt = fold_build1 (NEGATE_EXPR, type1, elt);
      return fold_build_pointer_plus (expr, elt);
    }
  return fold_build2 (code, type1, expr, elt);
}
/* Makes tree from the affine combination COMB.  */

tree
aff_combination_to_tree (aff_tree *comb)
{
  tree type = comb->type;
  tree expr = NULL_TREE;
  unsigned i;
  widest_int off, sgn;
  tree type1 = type;
  if (POINTER_TYPE_P (type))
    type1 = sizetype;

  gcc_assert (comb->n == MAX_AFF_ELTS || comb->rest == NULL_TREE);

  for (i = 0; i < comb->n; i++)
    expr = add_elt_to_tree (expr, type, comb->elts[i].val, comb->elts[i].coef,
			    comb);

  if (comb->rest)
    expr = add_elt_to_tree (expr, type, comb->rest, 1, comb);

  /* Ensure that we get x - 1, not x + (-1) or x + 0xff..f if x is
     unsigned.  */
  if (wi::neg_p (comb->offset))
    {
      sgn = -1;
      off = -comb->offset;
    }
  else
    {
      sgn = 1;
      off = comb->offset;
    }
  return add_elt_to_tree (expr, type, wide_int_to_tree (type1, off), sgn,
			  comb);
}
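/* For instance, elts = { {x, 1} } with offset -1 in an unsigned type is
   rebuilt as x - 1 rather than x + 0xff..f, thanks to the sign split
   above.  */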
/* Copies the tree elements of COMB to ensure that they are not shared.  */

void
unshare_aff_combination (aff_tree *comb)
{
  unsigned i;

  for (i = 0; i < comb->n; i++)
    comb->elts[i].val = unshare_expr (comb->elts[i].val);
  if (comb->rest)
    comb->rest = unshare_expr (comb->rest);
}
/* Remove M-th element from COMB.  */

void
aff_combination_remove_elt (aff_tree *comb, unsigned m)
{
  comb->n--;
  if (m <= comb->n)
    comb->elts[m] = comb->elts[comb->n];
  if (comb->rest)
    {
      comb->elts[comb->n].coef = 1;
      comb->elts[comb->n].val = comb->rest;
      comb->rest = NULL_TREE;
      comb->n++;
    }
}
/* Adds C * COEF * VAL to R.  VAL may be NULL, in that case only
   C * COEF is added to R.  */

static void
aff_combination_add_product (aff_tree *c, const widest_int &coef, tree val,
			     aff_tree *r)
{
  unsigned i;
  tree aval, type;

  for (i = 0; i < c->n; i++)
    {
      aval = c->elts[i].val;
      if (val)
	{
	  type = TREE_TYPE (aval);
	  aval = fold_build2 (MULT_EXPR, type, aval,
			      fold_convert (type, val));
	}

      aff_combination_add_elt (r, aval, coef * c->elts[i].coef);
    }

  if (c->rest)
    {
      aval = c->rest;
      if (val)
	{
	  type = TREE_TYPE (aval);
	  aval = fold_build2 (MULT_EXPR, type, aval,
			      fold_convert (type, val));
	}

      aff_combination_add_elt (r, aval, coef);
    }

  if (val)
    aff_combination_add_elt (r, val, coef * c->offset);
  else
    aff_combination_add_cst (r, coef * c->offset);
}
/* Multiplies C1 by C2, storing the result to R.  */

void
aff_combination_mult (aff_tree *c1, aff_tree *c2, aff_tree *r)
{
  unsigned i;

  gcc_assert (TYPE_PRECISION (c1->type) == TYPE_PRECISION (c2->type));

  aff_combination_zero (r, c1->type);

  for (i = 0; i < c2->n; i++)
    aff_combination_add_product (c1, c2->elts[i].coef, c2->elts[i].val, r);
  if (c2->rest)
    aff_combination_add_product (c1, 1, c2->rest, r);
  aff_combination_add_product (c1, c2->offset, NULL, r);
}
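/* For instance, multiplying a + 1 by b + 2 distributes into
   a*b + b + 2*a + 2: each element of C2 multiplies all of C1, and the
   constant offset of C2 contributes the final 2 * (a + 1) term.  */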
/* Returns the element of COMB whose value is VAL, or NULL if no such
   element exists.  If IDX is not NULL, it is set to the index of VAL in
   COMB.  */

static struct aff_comb_elt *
aff_combination_find_elt (aff_tree *comb, tree val, unsigned *idx)
{
  unsigned i;

  for (i = 0; i < comb->n; i++)
    if (operand_equal_p (comb->elts[i].val, val, 0))
      {
	if (idx)
	  *idx = i;

	return &comb->elts[i];
      }

  return NULL;
}
/* Element of the cache that maps ssa name NAME to its expanded form
   as an affine expression EXPANSION.  */

struct name_expansion
{
  aff_tree expansion;

  /* True if the expansion for the name is just being generated.  */
  unsigned in_progress : 1;
};
/* Expands SSA names in COMB recursively.  CACHE is used to cache the
   results.  */

void
aff_combination_expand (aff_tree *comb ATTRIBUTE_UNUSED,
			hash_map<tree, name_expansion *> **cache)
{
  unsigned i;
  aff_tree to_add, current, curre;
  tree e, rhs;
  gimple def;
  widest_int scale;
  struct name_expansion *exp;

  aff_combination_zero (&to_add, comb->type);
  for (i = 0; i < comb->n; i++)
    {
      tree type, name;
      enum tree_code code;

      e = comb->elts[i].val;
      type = TREE_TYPE (e);
      name = e;
      /* Look through some conversions.  */
      if (CONVERT_EXPR_P (e)
	  && (TYPE_PRECISION (type)
	      >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (e, 0)))))
	name = TREE_OPERAND (e, 0);
      if (TREE_CODE (name) != SSA_NAME)
	continue;
      def = SSA_NAME_DEF_STMT (name);
      if (!is_gimple_assign (def) || gimple_assign_lhs (def) != name)
	continue;

      code = gimple_assign_rhs_code (def);
      if (code != SSA_NAME
	  && !IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
	  && (get_gimple_rhs_class (code) != GIMPLE_SINGLE_RHS
	      || !is_gimple_min_invariant (gimple_assign_rhs1 (def))))
	continue;

      /* We do not know whether the reference retains its value at the
	 place where the expansion is used.  */
      if (TREE_CODE_CLASS (code) == tcc_reference)
	continue;

      if (!*cache)
	*cache = new hash_map<tree, name_expansion *>;
      name_expansion **slot = &(*cache)->get_or_insert (e);
      exp = *slot;

      if (!exp)
	{
	  exp = XNEW (struct name_expansion);
	  exp->in_progress = 1;
	  *slot = exp;
	  /* In principle this is a generally valid folding, but
	     it is not unconditionally an optimization, so do it
	     here and not in fold_unary.  */
	  /* Convert (T1)(X *+- CST) into (T1)X *+- (T1)CST if T1 is wider
	     than the type of X and overflow for the type of X is
	     undefined.  */
	  if (e != name
	      && INTEGRAL_TYPE_P (type)
	      && INTEGRAL_TYPE_P (TREE_TYPE (name))
	      && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (name))
	      && (code == PLUS_EXPR || code == MINUS_EXPR || code == MULT_EXPR)
	      && TREE_CODE (gimple_assign_rhs2 (def)) == INTEGER_CST)
	    rhs = fold_build2 (code, type,
			       fold_convert (type, gimple_assign_rhs1 (def)),
			       fold_convert (type, gimple_assign_rhs2 (def)));
	  else
	    {
	      rhs = gimple_assign_rhs_to_tree (def);
	      if (e != name)
		rhs = fold_convert (type, rhs);
	    }
	  tree_to_aff_combination_expand (rhs, comb->type, &current, cache);
	  exp->expansion = current;
	  exp->in_progress = 0;
	}
      else
	{
	  /* Since we follow the definitions in the SSA form, we should not
	     enter a cycle unless we pass through a phi node.  */
	  gcc_assert (!exp->in_progress);
	  current = exp->expansion;
	}

      /* Accumulate the new terms to TO_ADD, so that we do not modify
	 COMB while traversing it; include the term -coef * E, to remove
	 it from COMB.  */
      scale = comb->elts[i].coef;
      aff_combination_zero (&curre, comb->type);
      aff_combination_add_elt (&curre, e, -scale);
      aff_combination_scale (&current, scale);
      aff_combination_add (&to_add, &current);
      aff_combination_add (&to_add, &curre);
    }
  aff_combination_add (comb, &to_add);
}
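/* For instance, with x1 = a + b and x2 = x1 + x1, expanding the
   combination 1*x2 yields 2*a + 2*b: TO_ADD accumulates the scaled
   expansion while CURRE carries -1*x2 to cancel the original element.  */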
/* Similar to tree_to_aff_combination, but follows SSA name definitions
   and expands them recursively.  CACHE is used to cache the expansions
   of the ssa names, to avoid exponential time complexity for cases
   like

   x1 = a + b;
   x2 = x1 + x1;
   x3 = x2 + x2;
   ...  */

void
tree_to_aff_combination_expand (tree expr, tree type, aff_tree *comb,
				hash_map<tree, name_expansion *> **cache)
{
  tree_to_aff_combination (expr, type, comb);
  aff_combination_expand (comb, cache);
}
/* Frees memory occupied by struct name_expansion in *VALUE.  Callback for
   hash_map::traverse.  */

bool
free_name_expansion (tree const &, name_expansion **value, void *)
{
  free (*value);
  return true;
}
/* Frees memory allocated for the CACHE used by
   tree_to_aff_combination_expand.  */

void
free_affine_expand_cache (hash_map<tree, name_expansion *> **cache)
{
  if (!*cache)
    return;

  (*cache)->traverse<void *, free_name_expansion> (NULL);
  delete *cache;
  *cache = NULL;
}
/* If VAL != CST * DIV for any constant CST, returns false.
   Otherwise, if *MULT_SET is true, additionally compares CST and MULT,
   and if they are different, returns false.  Finally, if neither of these
   two cases occur, true is returned, and CST is stored to MULT and MULT_SET
   is set to true.  */

static bool
wide_int_constant_multiple_p (const widest_int &val, const widest_int &div,
			      bool *mult_set, widest_int *mult)
{
  widest_int cst;

  if (val == 0)
    {
      if (*mult_set && *mult != 0)
	return false;
      *mult_set = true;
      *mult = 0;
      return true;
    }

  if (div == 0)
    return false;

  if (!wi::multiple_of_p (val, div, SIGNED, &cst))
    return false;

  if (*mult_set && *mult != cst)
    return false;

  *mult_set = true;
  *mult = cst;
  return true;
}
/* Returns true if VAL = X * DIV for some constant X.  If this is the case,
   X is stored to MULT.  */

bool
aff_combination_constant_multiple_p (aff_tree *val, aff_tree *div,
				     widest_int *mult)
{
  bool mult_set = false;
  unsigned i;

  if (val->n == 0 && val->offset == 0)
    {
      *mult = 0;
      return true;
    }
  if (val->n != div->n)
    return false;

  if (val->rest || div->rest)
    return false;

  if (!wide_int_constant_multiple_p (val->offset, div->offset,
				     &mult_set, mult))
    return false;

  for (i = 0; i < div->n; i++)
    {
      struct aff_comb_elt *elt
	= aff_combination_find_elt (val, div->elts[i].val, NULL);
      if (!elt)
	return false;
      if (!wide_int_constant_multiple_p (elt->coef, div->elts[i].coef,
					 &mult_set, mult))
	return false;
    }

  gcc_assert (mult_set);
  return true;
}
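/* For instance, VAL = 4*a + 8 and DIV = a + 2 yields MULT = 4; the offsets
   and all matched coefficients must agree on one and the same factor.  */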
/* Prints the affine VAL to the FILE.  */

static void
print_aff (FILE *file, aff_tree *val)
{
  unsigned i;
  signop sgn = TYPE_SIGN (val->type);
  if (POINTER_TYPE_P (val->type))
    sgn = SIGNED;
  fprintf (file, "{\n  type = ");
  print_generic_expr (file, val->type, TDF_VOPS|TDF_MEMSYMS);
  fprintf (file, "\n  offset = ");
  print_dec (val->offset, file, sgn);
  if (val->n > 0)
    {
      fprintf (file, "\n  elements = {\n");
      for (i = 0; i < val->n; i++)
	{
	  fprintf (file, "    [%d] = ", i);
	  print_generic_expr (file, val->elts[i].val, TDF_VOPS|TDF_MEMSYMS);

	  fprintf (file, " * ");
	  print_dec (val->elts[i].coef, file, sgn);
	  if (i != val->n - 1)
	    fprintf (file, ", \n");
	}
      fprintf (file, "\n  }");
    }
  if (val->rest)
    {
      fprintf (file, "\n  rest = ");
      print_generic_expr (file, val->rest, TDF_VOPS|TDF_MEMSYMS);
    }
  fprintf (file, "\n}");
}
/* Prints the affine VAL to the standard error, used for debugging.  */

DEBUG_FUNCTION void
debug_aff (aff_tree *val)
{
  print_aff (stderr, val);
  fprintf (stderr, "\n");
}
/* Computes address of the reference REF in ADDR.  The size of the accessed
   location is stored to SIZE.  Returns the ultimate containing object to
   which REF refers.  */

tree
get_inner_reference_aff (tree ref, aff_tree *addr, widest_int *size)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree toff;
  machine_mode mode;
  int uns, vol;
  aff_tree tmp;
  tree base = get_inner_reference (ref, &bitsize, &bitpos, &toff, &mode,
				   &uns, &vol, false);
  tree base_addr = build_fold_addr_expr (base);

  /* ADDR = &BASE + TOFF + BITPOS / BITS_PER_UNIT.  */

  tree_to_aff_combination (base_addr, sizetype, addr);

  if (toff)
    {
      tree_to_aff_combination (toff, sizetype, &tmp);
      aff_combination_add (addr, &tmp);
    }

  aff_combination_const (&tmp, sizetype, bitpos / BITS_PER_UNIT);
  aff_combination_add (addr, &tmp);

  *size = (bitsize + BITS_PER_UNIT - 1) / BITS_PER_UNIT;

  return base;
}
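/* For instance, for REF = a[i] with 4-byte elements, ADDR becomes the
   combination &a + 4*i and SIZE is set to 4; BITSIZE is rounded up to
   whole bytes.  */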
/* Returns true if a region of size SIZE1 at position 0 and a region of
   size SIZE2 at position DIFF cannot overlap.  */

bool
aff_comb_cannot_overlap_p (aff_tree *diff, const widest_int &size1,
			   const widest_int &size2)
{
  /* Unless the difference is a constant, we fail.  */
  if (diff->n != 0)
    return false;

  if (wi::neg_p (diff->offset))
    {
      /* The second object is before the first one, we succeed if the last
	 element of the second object is before the start of the first one.  */
      return wi::neg_p (diff->offset + size2 - 1);
    }
  else
    {
      /* We succeed if the second object starts after the first one ends.  */
      return wi::les_p (size1, diff->offset);
    }
}
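/* For instance, SIZE1 = SIZE2 = 4 with a constant DIFF of 4 means the
   regions [0, 4) and [4, 8) cannot overlap; DIFF = -4 also succeeds,
   since -4 + 4 - 1 is still negative.  */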