/* Operations with affine combinations of trees.
   Copyright (C) 2005-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "double-int.h"
#include "tree.h"
#include "fold-const.h"
#include "hard-reg-set.h"
#include "statistics.h"
#include "fixed-value.h"
#include "insn-config.h"
#include "tree-pretty-print.h"
#include "tree-affine.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "gimple.h"
#include "cfgexpand.h"
#include "wide-int-print.h"
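/* An aff_tree describes an affine combination of trees: a constant
   offset plus at most MAX_AFF_ELTS terms COEF * VAL, with anything that
   does not fit collected in REST (with an implicit coefficient of 1).
   As a rough, purely illustrative sketch, the expression a + 4 * b + 7
   would be represented as

     offset  = 7
     n       = 2
     elts[0] = { .val = a, .coef = 1 }
     elts[1] = { .val = b, .coef = 4 }
     rest    = NULL_TREE  */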
/* Extends CST as appropriate for the affine combinations COMB.  */

static widest_int
wide_int_ext_for_comb (const widest_int &cst, aff_tree *comb)
{
  return wi::sext (cst, TYPE_PRECISION (comb->type));
}
/* Initializes affine combination COMB so that its value is zero in TYPE.  */

static void
aff_combination_zero (aff_tree *comb, tree type)
{
  int i;

  comb->type = type;
  comb->offset = 0;
  comb->n = 0;
  for (i = 0; i < MAX_AFF_ELTS; i++)
    comb->elts[i].coef = 0;
  comb->rest = NULL_TREE;
}
/* Sets COMB to CST.  */

void
aff_combination_const (aff_tree *comb, tree type, const widest_int &cst)
{
  aff_combination_zero (comb, type);
  comb->offset = wide_int_ext_for_comb (cst, comb);
}
/* Sets COMB to single element ELT.  */

void
aff_combination_elt (aff_tree *comb, tree type, tree elt)
{
  aff_combination_zero (comb, type);

  comb->n = 1;
  comb->elts[0].val = elt;
  comb->elts[0].coef = 1;
}
/* Scales COMB by SCALE.  */

void
aff_combination_scale (aff_tree *comb, const widest_int &scale_in)
{
  unsigned i, j;

  widest_int scale = wide_int_ext_for_comb (scale_in, comb);
  if (scale == 1)
    return;

  if (scale == 0)
    {
      aff_combination_zero (comb, comb->type);
      return;
    }

  comb->offset = wide_int_ext_for_comb (scale * comb->offset, comb);
  for (i = 0, j = 0; i < comb->n; i++)
    {
      widest_int new_coef
	= wide_int_ext_for_comb (scale * comb->elts[i].coef, comb);
      /* A coefficient may become zero due to overflow.  Remove the zero
	 elements.  */
      if (new_coef == 0)
	continue;
      comb->elts[j].coef = new_coef;
      comb->elts[j].val = comb->elts[i].val;
      j++;
    }
  comb->n = j;

  if (comb->rest)
    {
      tree type = comb->type;
      if (POINTER_TYPE_P (type))
	type = sizetype;
      if (comb->n < MAX_AFF_ELTS)
	{
	  comb->elts[comb->n].coef = scale;
	  comb->elts[comb->n].val = comb->rest;
	  comb->rest = NULL_TREE;
	  comb->n++;
	}
      else
	comb->rest = fold_build2 (MULT_EXPR, type, comb->rest,
				  wide_int_to_tree (type, scale));
    }
}
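/* For illustration only (not part of the pass itself): scaling the
   combination for B + 2 by 3 is expected to yield 3 * B + 6, e.g.

     aff_tree c;
     aff_combination_elt (&c, type, b);      // c = b
     aff_combination_add_cst (&c, 2);        // c = b + 2
     aff_combination_scale (&c, 3);          // c = 3 * b + 6

   where TYPE and B are assumed to be an integer type and a matching
   tree operand.  */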
/* Adds ELT * SCALE to COMB.  */

void
aff_combination_add_elt (aff_tree *comb, tree elt, const widest_int &scale_in)
{
  unsigned i;
  tree type;

  widest_int scale = wide_int_ext_for_comb (scale_in, comb);
  if (scale == 0)
    return;

  for (i = 0; i < comb->n; i++)
    if (operand_equal_p (comb->elts[i].val, elt, 0))
      {
	widest_int new_coef
	  = wide_int_ext_for_comb (comb->elts[i].coef + scale, comb);
	if (new_coef != 0)
	  {
	    comb->elts[i].coef = new_coef;
	    return;
	  }

	comb->n--;
	comb->elts[i] = comb->elts[comb->n];

	if (comb->rest)
	  {
	    gcc_assert (comb->n == MAX_AFF_ELTS - 1);
	    comb->elts[comb->n].coef = 1;
	    comb->elts[comb->n].val = comb->rest;
	    comb->rest = NULL_TREE;
	    comb->n++;
	  }
	return;
      }
  if (comb->n < MAX_AFF_ELTS)
    {
      comb->elts[comb->n].coef = scale;
      comb->elts[comb->n].val = elt;
      comb->n++;
      return;
    }

  type = comb->type;
  if (POINTER_TYPE_P (type))
    type = sizetype;

  if (scale == 1)
    elt = fold_convert (type, elt);
  else
    elt = fold_build2 (MULT_EXPR, type,
		       fold_convert (type, elt),
		       wide_int_to_tree (type, scale));

  if (comb->rest)
    comb->rest = fold_build2 (PLUS_EXPR, type, comb->rest,
			      elt);
  else
    comb->rest = elt;
}
/* Adds CST to C.  */

static void
aff_combination_add_cst (aff_tree *c, const widest_int &cst)
{
  c->offset = wide_int_ext_for_comb (c->offset + cst, c);
}
/* Adds COMB2 to COMB1.  */

void
aff_combination_add (aff_tree *comb1, aff_tree *comb2)
{
  unsigned i;

  aff_combination_add_cst (comb1, comb2->offset);
  for (i = 0; i < comb2->n; i++)
    aff_combination_add_elt (comb1, comb2->elts[i].val, comb2->elts[i].coef);
  if (comb2->rest)
    aff_combination_add_elt (comb1, comb2->rest, 1);
}
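/* Illustrative sketch only, assuming C1 already holds a + 1 and C2
   holds b - 1:

     aff_combination_add (&c1, &c2);   // c1 becomes a + b

   The constant offsets 1 and -1 cancel and the element lists are merged
   through aff_combination_add_elt.  */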
/* Converts affine combination COMB to TYPE.  */

void
aff_combination_convert (aff_tree *comb, tree type)
{
  unsigned i, j;
  tree comb_type = comb->type;

  if (TYPE_PRECISION (type) > TYPE_PRECISION (comb_type))
    {
      tree val = fold_convert (type, aff_combination_to_tree (comb));
      tree_to_aff_combination (val, type, comb);
      return;
    }

  comb->type = type;
  if (comb->rest && !POINTER_TYPE_P (type))
    comb->rest = fold_convert (type, comb->rest);

  if (TYPE_PRECISION (type) == TYPE_PRECISION (comb_type))
    return;

  comb->offset = wide_int_ext_for_comb (comb->offset, comb);
  for (i = j = 0; i < comb->n; i++)
    {
      if (comb->elts[i].coef == 0)
	continue;
      comb->elts[j].coef = comb->elts[i].coef;
      comb->elts[j].val = fold_convert (type, comb->elts[i].val);
      j++;
    }

  comb->n = j;
  if (comb->n < MAX_AFF_ELTS && comb->rest)
    {
      comb->elts[comb->n].coef = 1;
      comb->elts[comb->n].val = comb->rest;
      comb->rest = NULL_TREE;
      comb->n++;
    }
}
/* Splits EXPR into an affine combination of parts.  */

void
tree_to_aff_combination (tree expr, tree type, aff_tree *comb)
{
  aff_tree tmp;
  enum tree_code code;
  tree cst, core, toffset;
  HOST_WIDE_INT bitpos, bitsize;
  machine_mode mode;
  int unsignedp, volatilep;

  STRIP_NOPS (expr);

  code = TREE_CODE (expr);
  switch (code)
    {
    case INTEGER_CST:
      aff_combination_const (comb, type, wi::to_widest (expr));
      return;

    case POINTER_PLUS_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
      aff_combination_add (comb, &tmp);
      return;

    case PLUS_EXPR:
    case MINUS_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), type, &tmp);
      if (code == MINUS_EXPR)
	aff_combination_scale (&tmp, -1);
      aff_combination_add (comb, &tmp);
      return;

    case MULT_EXPR:
      cst = TREE_OPERAND (expr, 1);
      if (TREE_CODE (cst) != INTEGER_CST)
	break;
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, wi::to_widest (cst));
      return;

    case NEGATE_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, -1);
      return;

    case BIT_NOT_EXPR:
      /* ~x = -x - 1 */
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, -1);
      aff_combination_add_cst (comb, -1);
      return;

    case ADDR_EXPR:
      /* Handle &MEM[ptr + CST] which is equivalent to POINTER_PLUS_EXPR.  */
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == MEM_REF)
	{
	  expr = TREE_OPERAND (expr, 0);
	  tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
	  tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
	  aff_combination_add (comb, &tmp);
	  return;
	}
      core = get_inner_reference (TREE_OPERAND (expr, 0), &bitsize, &bitpos,
				  &toffset, &mode, &unsignedp, &volatilep,
				  false);
      if (bitpos % BITS_PER_UNIT != 0)
	break;
      aff_combination_const (comb, type, bitpos / BITS_PER_UNIT);
      if (TREE_CODE (core) == MEM_REF)
	{
	  aff_combination_add_cst (comb, wi::to_widest (TREE_OPERAND (core, 1)));
	  core = TREE_OPERAND (core, 0);
	}
      else
	core = build_fold_addr_expr (core);

      if (TREE_CODE (core) == ADDR_EXPR)
	aff_combination_add_elt (comb, core, 1);
      else
	{
	  tree_to_aff_combination (core, type, &tmp);
	  aff_combination_add (comb, &tmp);
	}
      if (toffset)
	{
	  tree_to_aff_combination (toffset, type, &tmp);
	  aff_combination_add (comb, &tmp);
	}
      return;

    case MEM_REF:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR)
	tree_to_aff_combination (TREE_OPERAND (TREE_OPERAND (expr, 0), 0),
				 type, comb);
      else if (integer_zerop (TREE_OPERAND (expr, 1)))
	{
	  aff_combination_elt (comb, type, expr);
	  return;
	}
      else
	aff_combination_elt (comb, type,
			     build2 (MEM_REF, TREE_TYPE (expr),
				     TREE_OPERAND (expr, 0),
				     build_int_cst
				      (TREE_TYPE (TREE_OPERAND (expr, 1)), 0)));
      tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
      aff_combination_add (comb, &tmp);
      return;

    default:
      break;
    }

  aff_combination_elt (comb, type, expr);
}
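/* Illustrative sketch only: splitting a GENERIC tree for p + 4 * i
   (a POINTER_PLUS_EXPR with a hypothetical pointer P and index I) with

     aff_tree c;
     tree_to_aff_combination (expr, TREE_TYPE (expr), &c);

   is expected to give offset = 0, one element { .val = p, .coef = 1 } and,
   after the MULT_EXPR case scales the sub-combination for 4 * i, another
   element { .val = i, .coef = 4 }.  */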
/* Creates EXPR + ELT * SCALE in TYPE.  EXPR is taken from affine
   combination COMB.  */

static tree
add_elt_to_tree (tree expr, tree type, tree elt, const widest_int &scale_in,
		 aff_tree *comb ATTRIBUTE_UNUSED)
{
  enum tree_code code;
  tree type1 = type;
  if (POINTER_TYPE_P (type))
    type1 = sizetype;

  widest_int scale = wide_int_ext_for_comb (scale_in, comb);

  if (scale == -1
      && POINTER_TYPE_P (TREE_TYPE (elt)))
    {
      elt = convert_to_ptrofftype (elt);
      elt = fold_build1 (NEGATE_EXPR, TREE_TYPE (elt), elt);
      scale = 1;
    }

  if (scale == 1)
    {
      if (!expr)
	{
	  if (POINTER_TYPE_P (TREE_TYPE (elt)))
	    return elt;
	  else
	    return fold_convert (type1, elt);
	}

      if (POINTER_TYPE_P (TREE_TYPE (expr)))
	return fold_build_pointer_plus (expr, elt);
      if (POINTER_TYPE_P (TREE_TYPE (elt)))
	return fold_build_pointer_plus (elt, expr);
      return fold_build2 (PLUS_EXPR, type1,
			  expr, fold_convert (type1, elt));
    }

  if (scale == -1)
    {
      if (!expr)
	return fold_build1 (NEGATE_EXPR, type1,
			    fold_convert (type1, elt));

      if (POINTER_TYPE_P (TREE_TYPE (expr)))
	{
	  elt = convert_to_ptrofftype (elt);
	  elt = fold_build1 (NEGATE_EXPR, TREE_TYPE (elt), elt);
	  return fold_build_pointer_plus (expr, elt);
	}
      return fold_build2 (MINUS_EXPR, type1,
			  expr, fold_convert (type1, elt));
    }

  elt = fold_convert (type1, elt);
  if (!expr)
    return fold_build2 (MULT_EXPR, type1, elt,
			wide_int_to_tree (type1, scale));

  if (wi::neg_p (scale))
    {
      code = MINUS_EXPR;
      scale = -scale;
    }
  else
    code = PLUS_EXPR;

  elt = fold_build2 (MULT_EXPR, type1, elt,
		     wide_int_to_tree (type1, scale));
  if (POINTER_TYPE_P (TREE_TYPE (expr)))
    {
      if (code == MINUS_EXPR)
	elt = fold_build1 (NEGATE_EXPR, type1, elt);
      return fold_build_pointer_plus (expr, elt);
    }
  return fold_build2 (code, type1, expr, elt);
}
/* Makes tree from the affine combination COMB.  */

tree
aff_combination_to_tree (aff_tree *comb)
{
  tree type = comb->type;
  tree expr = NULL_TREE;
  unsigned i;
  widest_int off, sgn;
  tree type1 = type;
  if (POINTER_TYPE_P (type))
    type1 = sizetype;

  gcc_assert (comb->n == MAX_AFF_ELTS || comb->rest == NULL_TREE);

  for (i = 0; i < comb->n; i++)
    expr = add_elt_to_tree (expr, type, comb->elts[i].val, comb->elts[i].coef,
			    comb);

  if (comb->rest)
    expr = add_elt_to_tree (expr, type, comb->rest, 1, comb);

  /* Ensure that we get x - 1, not x + (-1) or x + 0xff..f if x is
     unsigned.  */
  if (wi::neg_p (comb->offset))
    {
      off = -comb->offset;
      sgn = -1;
    }
  else
    {
      off = comb->offset;
      sgn = 1;
    }
  return add_elt_to_tree (expr, type, wide_int_to_tree (type1, off), sgn,
			  comb);
}
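/* Illustrative round trip, assuming EXPR is some integer-valued tree of
   type TYPE:

     aff_tree c;
     tree_to_aff_combination (expr, type, &c);
     tree flat = aff_combination_to_tree (&c);

   FLAT is a re-associated tree with the same value; the constant offset
   is added last, as x - 1 rather than x + 0xff..f for negative offsets.  */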
/* Copies the tree elements of COMB to ensure that they are not shared.  */

void
unshare_aff_combination (aff_tree *comb)
{
  unsigned i;

  for (i = 0; i < comb->n; i++)
    comb->elts[i].val = unshare_expr (comb->elts[i].val);
  if (comb->rest)
    comb->rest = unshare_expr (comb->rest);
}
/* Remove M-th element from COMB.  */

void
aff_combination_remove_elt (aff_tree *comb, unsigned m)
{
  comb->n--;
  if (m <= comb->n)
    comb->elts[m] = comb->elts[comb->n];
  if (comb->rest)
    {
      comb->elts[comb->n].coef = 1;
      comb->elts[comb->n].val = comb->rest;
      comb->rest = NULL_TREE;
      comb->n++;
    }
}
/* Adds C * COEF * VAL to R.  VAL may be NULL, in that case only
   C * COEF is added to R.  */

static void
aff_combination_add_product (aff_tree *c, const widest_int &coef, tree val,
			     aff_tree *r)
{
  unsigned i;
  tree aval, type;

  for (i = 0; i < c->n; i++)
    {
      aval = c->elts[i].val;
      if (val)
	{
	  type = TREE_TYPE (aval);
	  aval = fold_build2 (MULT_EXPR, type, aval,
			      fold_convert (type, val));
	}

      aff_combination_add_elt (r, aval, coef * c->elts[i].coef);
    }

  if (c->rest)
    {
      aval = c->rest;
      if (val)
	{
	  type = TREE_TYPE (aval);
	  aval = fold_build2 (MULT_EXPR, type, aval,
			      fold_convert (type, val));
	}

      aff_combination_add_elt (r, aval, coef);
    }

  if (val)
    aff_combination_add_elt (r, val, coef * c->offset);
  else
    aff_combination_add_cst (r, coef * c->offset);
}
/* Multiplies C1 by C2, storing the result to R.  */

void
aff_combination_mult (aff_tree *c1, aff_tree *c2, aff_tree *r)
{
  unsigned i;
  gcc_assert (TYPE_PRECISION (c1->type) == TYPE_PRECISION (c2->type));

  aff_combination_zero (r, c1->type);

  for (i = 0; i < c2->n; i++)
    aff_combination_add_product (c1, c2->elts[i].coef, c2->elts[i].val, r);
  if (c2->rest)
    aff_combination_add_product (c1, 1, c2->rest, r);
  aff_combination_add_product (c1, c2->offset, NULL, r);
}
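/* Worked example (illustrative only): multiplying a + 1 by b + 2 with
   aff_combination_mult distributes each term of C2 over C1, giving
   a * b + 2 * a + b + 2; the a * b term is kept as a single element whose
   value is the folded MULT_EXPR built by aff_combination_add_product.  */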
/* Returns the element of COMB whose value is VAL, or NULL if no such
   element exists.  If IDX is not NULL, it is set to the index of VAL in
   COMB.  */

static struct aff_comb_elt *
aff_combination_find_elt (aff_tree *comb, tree val, unsigned *idx)
{
  unsigned i;

  for (i = 0; i < comb->n; i++)
    if (operand_equal_p (comb->elts[i].val, val, 0))
      {
	if (idx)
	  *idx = i;

	return &comb->elts[i];
      }

  return NULL;
}
/* Element of the cache that maps ssa name NAME to its expanded form
   as an affine expression EXPANSION.  */

struct name_expansion
{
  aff_tree expansion;

  /* True if the expansion for the name is just being generated.  */
  unsigned in_progress : 1;
};
/* Expands SSA names in COMB recursively.  CACHE is used to cache the
   results of the expansions.  */

void
aff_combination_expand (aff_tree *comb ATTRIBUTE_UNUSED,
			hash_map<tree, name_expansion *> **cache)
{
  unsigned i;
  aff_tree to_add, current, curre;
  tree e, rhs;
  gimple def;
  widest_int scale;
  struct name_expansion *exp;

  aff_combination_zero (&to_add, comb->type);
  for (i = 0; i < comb->n; i++)
    {
      tree type, name;
      enum tree_code code;

      e = comb->elts[i].val;
      type = TREE_TYPE (e);
      name = e;
      /* Look through some conversions.  */
      if (CONVERT_EXPR_P (e)
	  && (TYPE_PRECISION (type)
	      >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (e, 0)))))
	name = TREE_OPERAND (e, 0);
      if (TREE_CODE (name) != SSA_NAME)
	continue;
      def = SSA_NAME_DEF_STMT (name);
      if (!is_gimple_assign (def) || gimple_assign_lhs (def) != name)
	continue;

      code = gimple_assign_rhs_code (def);
      if (code != SSA_NAME
	  && !IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
	  && (get_gimple_rhs_class (code) != GIMPLE_SINGLE_RHS
	      || !is_gimple_min_invariant (gimple_assign_rhs1 (def))))
	continue;

      /* We do not know whether the reference retains its value at the
	 place where the expansion is used.  */
      if (TREE_CODE_CLASS (code) == tcc_reference)
	continue;

      if (!*cache)
	*cache = new hash_map<tree, name_expansion *>;
      name_expansion **slot = &(*cache)->get_or_insert (e);
      exp = *slot;

      if (!exp)
	{
	  exp = XNEW (struct name_expansion);
	  exp->in_progress = 1;
	  *slot = exp;
	  /* In principle this is a generally valid folding, but
	     it is not unconditionally an optimization, so do it
	     here and not in fold_unary.  */
	  /* Convert (T1)(X *+- CST) into (T1)X *+- (T1)CST if T1 is wider
	     than the type of X and overflow for the type of X is
	     undefined.  */
	  if (e != name
	      && INTEGRAL_TYPE_P (type)
	      && INTEGRAL_TYPE_P (TREE_TYPE (name))
	      && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (name))
	      && TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (name))
	      && (code == PLUS_EXPR || code == MINUS_EXPR || code == MULT_EXPR)
	      && TREE_CODE (gimple_assign_rhs2 (def)) == INTEGER_CST)
	    rhs = fold_build2 (code, type,
			       fold_convert (type, gimple_assign_rhs1 (def)),
			       fold_convert (type, gimple_assign_rhs2 (def)));
	  else
	    {
	      rhs = gimple_assign_rhs_to_tree (def);
	      if (e != name)
		rhs = fold_convert (type, rhs);
	    }
	  tree_to_aff_combination_expand (rhs, comb->type, &current, cache);
	  exp->expansion = current;
	  exp->in_progress = 0;
	}
      else
	{
	  /* Since we follow the definitions in the SSA form, we should not
	     enter a cycle unless we pass through a phi node.  */
	  gcc_assert (!exp->in_progress);
	  current = exp->expansion;
	}

      /* Accumulate the new terms to TO_ADD, so that we do not modify
	 COMB while traversing it; include the term -coef * E, to remove
	 it from COMB.  */
      scale = comb->elts[i].coef;
      aff_combination_zero (&curre, comb->type);
      aff_combination_add_elt (&curre, e, -scale);
      aff_combination_scale (&current, scale);
      aff_combination_add (&to_add, &current);
      aff_combination_add (&to_add, &curre);
    }
  aff_combination_add (comb, &to_add);
}
/* Similar to tree_to_aff_combination, but follows SSA name definitions
   and expands them recursively.  CACHE is used to cache the expansions
   of the ssa names, to avoid exponential time complexity for cases
   like

   D.1 = a + a;
   D.2 = D.1 + D.1;
   D.3 = D.2 + D.2;
   ...  */

void
tree_to_aff_combination_expand (tree expr, tree type, aff_tree *comb,
				hash_map<tree, name_expansion *> **cache)
{
  tree_to_aff_combination (expr, type, comb);
  aff_combination_expand (comb, cache);
}
/* Frees memory occupied by struct name_expansion in *VALUE.  Callback for
   hash_map::traverse.  */

bool
free_name_expansion (tree const &, name_expansion **value, void *)
{
  free (*value);
  return true;
}
/* Frees memory allocated for the CACHE used by
   tree_to_aff_combination_expand.  */

void
free_affine_expand_cache (hash_map<tree, name_expansion *> **cache)
{
  if (!*cache)
    return;

  (*cache)->traverse<void *, free_name_expansion> (NULL);
  delete *cache;
  *cache = NULL;
}
/* If VAL != CST * DIV for any constant CST, returns false.
   Otherwise, if *MULT_SET is true, additionally compares CST and MULT,
   and if they are different, returns false.  Finally, if neither of these
   two cases occur, true is returned, and CST is stored to MULT and MULT_SET
   is set to true.  */

static bool
wide_int_constant_multiple_p (const widest_int &val, const widest_int &div,
			      bool *mult_set, widest_int *mult)
{
  widest_int cst;

  if (val == 0)
    {
      if (*mult_set && *mult != 0)
	return false;
      *mult_set = true;
      *mult = 0;
      return true;
    }

  if (div == 0)
    return false;

  if (!wi::multiple_of_p (val, div, SIGNED, &cst))
    return false;

  if (*mult_set && *mult != cst)
    return false;

  *mult_set = true;
  *mult = cst;
  return true;
}
/* Returns true if VAL = X * DIV for some constant X.  If this is the case,
   X is stored to MULT.  */

bool
aff_combination_constant_multiple_p (aff_tree *val, aff_tree *div,
				     widest_int *mult)
{
  bool mult_set = false;
  unsigned i;

  if (val->n == 0 && val->offset == 0)
    {
      *mult = 0;
      return true;
    }
  if (val->n != div->n)
    return false;

  if (val->rest || div->rest)
    return false;

  if (!wide_int_constant_multiple_p (val->offset, div->offset,
				     &mult_set, mult))
    return false;

  for (i = 0; i < div->n; i++)
    {
      struct aff_comb_elt *elt
	= aff_combination_find_elt (val, div->elts[i].val, NULL);
      if (!elt)
	return false;
      if (!wide_int_constant_multiple_p (elt->coef, div->elts[i].coef,
					 &mult_set, mult))
	return false;
    }

  gcc_assert (mult_set);
  return true;
}
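/* Illustrative example only: for VAL = 4 * a + 8 and DIV = a + 2,
   aff_combination_constant_multiple_p is expected to return true and set
   *MULT to 4, since the offsets (8 = 4 * 2) and the coefficients of the
   matching element a (4 = 4 * 1) agree on the same constant multiple.  */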
/* Prints the affine VAL to the FILE.  */

static void
print_aff (FILE *file, aff_tree *val)
{
  unsigned i;
  signop sgn = TYPE_SIGN (val->type);
  if (POINTER_TYPE_P (val->type))
    sgn = SIGNED;
  fprintf (file, "{\n  type = ");
  print_generic_expr (file, val->type, TDF_VOPS|TDF_MEMSYMS);
  fprintf (file, "\n  offset = ");
  print_dec (val->offset, file, sgn);
  if (val->n > 0)
    {
      fprintf (file, "\n  elements = {\n");
      for (i = 0; i < val->n; i++)
	{
	  fprintf (file, "    [%d] = ", i);
	  print_generic_expr (file, val->elts[i].val, TDF_VOPS|TDF_MEMSYMS);

	  fprintf (file, " * ");
	  print_dec (val->elts[i].coef, file, sgn);
	  if (i != val->n - 1)
	    fprintf (file, ", \n");
	}
      fprintf (file, "\n  }");
    }
  if (val->rest)
    {
      fprintf (file, "\n  rest = ");
      print_generic_expr (file, val->rest, TDF_VOPS|TDF_MEMSYMS);
    }
  fprintf (file, "\n}");
}
/* Prints the affine VAL to the standard error, used for debugging.  */

DEBUG_FUNCTION void
debug_aff (aff_tree *val)
{
  print_aff (stderr, val);
  fprintf (stderr, "\n");
}
/* Computes address of the reference REF in ADDR.  The size of the accessed
   location is stored to SIZE.  Returns the ultimate containing object to
   which REF refers.  */

tree
get_inner_reference_aff (tree ref, aff_tree *addr, widest_int *size)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree toff;
  machine_mode mode;
  int uns, vol;
  aff_tree tmp;
  tree base = get_inner_reference (ref, &bitsize, &bitpos, &toff, &mode,
				   &uns, &vol, false);
  tree base_addr = build_fold_addr_expr (base);

  /* ADDR = &BASE + TOFF + BITPOS / BITS_PER_UNIT.  */

  tree_to_aff_combination (base_addr, sizetype, addr);

  if (toff)
    {
      tree_to_aff_combination (toff, sizetype, &tmp);
      aff_combination_add (addr, &tmp);
    }

  aff_combination_const (&tmp, sizetype, bitpos / BITS_PER_UNIT);
  aff_combination_add (addr, &tmp);

  *size = (bitsize + BITS_PER_UNIT - 1) / BITS_PER_UNIT;

  return base;
}
/* Returns true if a region of size SIZE1 at position 0 and a region of
   size SIZE2 at position DIFF cannot overlap.  */

bool
aff_comb_cannot_overlap_p (aff_tree *diff, const widest_int &size1,
			   const widest_int &size2)
{
  /* Unless the difference is a constant, we fail.  */
  if (diff->n != 0)
    return false;

  if (wi::neg_p (diff->offset))
    {
      /* The second object is before the first one, we succeed if the last
	 element of the second object is before the start of the first one.  */
      return wi::neg_p (diff->offset + size2 - 1);
    }
  else
    {
      /* We succeed if the second object starts after the first one ends.  */
      return wi::les_p (size1, diff->offset);
    }
}
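/* Worked example (illustrative only): with DIFF->offset = -16, SIZE1 = 8
   and SIZE2 = 8, the second region occupies [-16, -9] and the first
   [0, 7], so -16 + 8 - 1 = -9 is negative and the function returns true.
   With DIFF->offset = 4 and the same sizes the regions overlap: 8 <= 4
   fails and the function returns false.  */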