/* Operations with affine combinations of trees.
   Copyright (C) 2005-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "tree-pretty-print.h"
#include "tree-affine.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "cfgexpand.h"
#include "wide-int-print.h"
/* Extends CST as appropriate for the affine combination COMB.  */

static widest_int
wide_int_ext_for_comb (const widest_int &cst, aff_tree *comb)
{
  return wi::sext (cst, TYPE_PRECISION (comb->type));
}
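/* Illustrative note (a sketch, not part of the interface description): the
   extension is signed and uses the precision of COMB's type, so for a
   combination over an 8-bit type the constant 255 is canonicalized to -1:

     widest_int w = wide_int_ext_for_comb (255, comb);
     // w == -1 when TYPE_PRECISION (comb->type) == 8

   All offsets and coefficients stored in an aff_tree are kept in this
   sign-extended canonical form.  */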
/* Initializes affine combination COMB so that its value is zero in TYPE.  */

static void
aff_combination_zero (aff_tree *comb, tree type)
{
  int i;

  comb->type = type;
  comb->offset = 0;
  comb->n = 0;
  for (i = 0; i < MAX_AFF_ELTS; i++)
    comb->elts[i].coef = 0;
  comb->rest = NULL_TREE;
}
/* Sets COMB to CST.  */

void
aff_combination_const (aff_tree *comb, tree type, const widest_int &cst)
{
  aff_combination_zero (comb, type);
  comb->offset = wide_int_ext_for_comb (cst, comb);
}
/* Sets COMB to single element ELT.  */

void
aff_combination_elt (aff_tree *comb, tree type, tree elt)
{
  aff_combination_zero (comb, type);

  comb->n = 1;
  comb->elts[0].val = elt;
  comb->elts[0].coef = 1;
}
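/* Illustrative sketch: an aff_tree C represents the value

     C.offset + C.elts[0].coef * C.elts[0].val + ...
	      + C.elts[C.n - 1].coef * C.elts[C.n - 1].val + C.rest

   so, assuming I is some SSA name or DECL of type int,
   aff_combination_elt (&c, integer_type_node, i) leaves c.offset == 0,
   c.n == 1 and c.elts[0] == { i, 1 }, while
   aff_combination_const (&c, integer_type_node, 7) leaves c.offset == 7
   and c.n == 0.  */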
/* Scales COMB by SCALE.  */

void
aff_combination_scale (aff_tree *comb, const widest_int &scale_in)
{
  unsigned i, j;

  widest_int scale = wide_int_ext_for_comb (scale_in, comb);
  if (scale == 1)
    return;

  if (scale == 0)
    {
      aff_combination_zero (comb, comb->type);
      return;
    }

  comb->offset = wide_int_ext_for_comb (scale * comb->offset, comb);
  for (i = 0, j = 0; i < comb->n; i++)
    {
      widest_int new_coef
	= wide_int_ext_for_comb (scale * comb->elts[i].coef, comb);
      /* A coefficient may become zero due to overflow.  Remove the zero
	 elements.  */
      if (new_coef == 0)
	continue;
      comb->elts[j].coef = new_coef;
      comb->elts[j].val = comb->elts[i].val;
      j++;
    }
  comb->n = j;

  if (comb->rest)
    {
      tree type = comb->type;
      if (POINTER_TYPE_P (type))
	type = sizetype;
      if (comb->n < MAX_AFF_ELTS)
	{
	  comb->elts[comb->n].coef = scale;
	  comb->elts[comb->n].val = comb->rest;
	  comb->rest = NULL_TREE;
	  comb->n++;
	}
      else
	comb->rest = fold_build2 (MULT_EXPR, type, comb->rest,
				  wide_int_to_tree (type, scale));
    }
}
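/* Illustrative sketch: scaling the combination 2*i + 7 by 3 yields
   6*i + 21:

     // before: c.offset == 7, c.elts[0] == { i, 2 }
     aff_combination_scale (&c, 3);
     // after:  c.offset == 21, c.elts[0] == { i, 6 }

   Coefficients wrap modulo the precision of c.type, and an element whose
   scaled coefficient wraps to zero is dropped by the loop above.  */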
/* Adds ELT * SCALE to COMB.  */

void
aff_combination_add_elt (aff_tree *comb, tree elt, const widest_int &scale_in)
{
  unsigned i;
  tree type;

  widest_int scale = wide_int_ext_for_comb (scale_in, comb);
  if (scale == 0)
    return;

  for (i = 0; i < comb->n; i++)
    if (operand_equal_p (comb->elts[i].val, elt, 0))
      {
	widest_int new_coef
	  = wide_int_ext_for_comb (comb->elts[i].coef + scale, comb);
	if (new_coef != 0)
	  {
	    comb->elts[i].coef = new_coef;
	    return;
	  }

	comb->n--;
	comb->elts[i] = comb->elts[comb->n];

	if (comb->rest)
	  {
	    gcc_assert (comb->n == MAX_AFF_ELTS - 1);
	    comb->elts[comb->n].coef = 1;
	    comb->elts[comb->n].val = comb->rest;
	    comb->rest = NULL_TREE;
	    comb->n++;
	  }
	return;
      }

  if (comb->n < MAX_AFF_ELTS)
    {
      comb->elts[comb->n].coef = scale;
      comb->elts[comb->n].val = elt;
      comb->n++;
      return;
    }

  type = comb->type;
  if (POINTER_TYPE_P (type))
    type = sizetype;

  if (scale == 1)
    elt = fold_convert (type, elt);
  else
    elt = fold_build2 (MULT_EXPR, type,
		       fold_convert (type, elt),
		       wide_int_to_tree (type, scale));

  if (comb->rest)
    comb->rest = fold_build2 (PLUS_EXPR, type, comb->rest, elt);
  else
    comb->rest = elt;
}
/* Adds CST to C.  */

static void
aff_combination_add_cst (aff_tree *c, const widest_int &cst)
{
  c->offset = wide_int_ext_for_comb (c->offset + cst, c);
}
/* Adds COMB2 to COMB1.  */

void
aff_combination_add (aff_tree *comb1, aff_tree *comb2)
{
  unsigned i;

  aff_combination_add_cst (comb1, comb2->offset);
  for (i = 0; i < comb2->n; i++)
    aff_combination_add_elt (comb1, comb2->elts[i].val, comb2->elts[i].coef);
  if (comb2->rest)
    aff_combination_add_elt (comb1, comb2->rest, 1);
}
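/* Illustrative sketch, assuming i_expr and j_expr are arbitrary trees of
   type int: the value 2*i + 3*j + 7 can be built from the primitives
   above as

     aff_tree c, t;
     tree_to_aff_combination (i_expr, integer_type_node, &c);  // c = i
     aff_combination_scale (&c, 2);                            // c = 2*i
     tree_to_aff_combination (j_expr, integer_type_node, &t);  // t = j
     aff_combination_scale (&t, 3);                            // t = 3*j
     aff_combination_add (&c, &t);                             // c = 2*i + 3*j
     aff_combination_add_cst (&c, 7);                          // c = 2*i + 3*j + 7

   Once more than MAX_AFF_ELTS distinct elements accumulate, the excess is
   folded into the catch-all "rest" tree, which has an implicit coefficient
   of 1.  */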
/* Converts affine combination COMB to TYPE.  */

void
aff_combination_convert (aff_tree *comb, tree type)
{
  unsigned i, j;
  tree comb_type = comb->type;

  if (TYPE_PRECISION (type) > TYPE_PRECISION (comb_type))
    {
      tree val = fold_convert (type, aff_combination_to_tree (comb));
      tree_to_aff_combination (val, type, comb);
      return;
    }

  comb->type = type;
  if (comb->rest && !POINTER_TYPE_P (type))
    comb->rest = fold_convert (type, comb->rest);

  if (TYPE_PRECISION (type) == TYPE_PRECISION (comb_type))
    return;

  comb->offset = wide_int_ext_for_comb (comb->offset, comb);
  for (i = j = 0; i < comb->n; i++)
    {
      if (comb->elts[i].coef == 0)
	continue;
      comb->elts[j].coef = comb->elts[i].coef;
      comb->elts[j].val = fold_convert (type, comb->elts[i].val);
      j++;
    }

  comb->n = j;
  if (comb->n < MAX_AFF_ELTS && comb->rest)
    {
      comb->elts[comb->n].coef = 1;
      comb->elts[comb->n].val = comb->rest;
      comb->rest = NULL_TREE;
      comb->n++;
    }
}
/* Splits EXPR into an affine combination of parts.  */

void
tree_to_aff_combination (tree expr, tree type, aff_tree *comb)
{
  aff_tree tmp;
  enum tree_code code;
  tree cst, core, toffset;
  HOST_WIDE_INT bitpos, bitsize;
  machine_mode mode;
  int unsignedp, volatilep;

  STRIP_NOPS (expr);

  code = TREE_CODE (expr);
  switch (code)
    {
    case INTEGER_CST:
      aff_combination_const (comb, type, wi::to_widest (expr));
      return;

    case POINTER_PLUS_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
      aff_combination_add (comb, &tmp);
      return;

    case PLUS_EXPR:
    case MINUS_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), type, &tmp);
      if (code == MINUS_EXPR)
	aff_combination_scale (&tmp, -1);
      aff_combination_add (comb, &tmp);
      return;

    case MULT_EXPR:
      cst = TREE_OPERAND (expr, 1);
      if (TREE_CODE (cst) != INTEGER_CST)
	break;
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, wi::to_widest (cst));
      return;

    case NEGATE_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, -1);
      return;

    case BIT_NOT_EXPR:
      /* ~x = -x - 1  */
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, -1);
      aff_combination_add_cst (comb, -1);
      return;

    case ADDR_EXPR:
      /* Handle &MEM[ptr + CST] which is equivalent to POINTER_PLUS_EXPR.  */
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == MEM_REF)
	{
	  expr = TREE_OPERAND (expr, 0);
	  tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
	  tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
	  aff_combination_add (comb, &tmp);
	  return;
	}
      core = get_inner_reference (TREE_OPERAND (expr, 0), &bitsize, &bitpos,
				  &toffset, &mode, &unsignedp, &volatilep,
				  false);
      if (bitpos % BITS_PER_UNIT != 0)
	break;
      aff_combination_const (comb, type, bitpos / BITS_PER_UNIT);
      if (TREE_CODE (core) == MEM_REF)
	{
	  aff_combination_add_cst (comb, wi::to_widest (TREE_OPERAND (core, 1)));
	  core = TREE_OPERAND (core, 0);
	}
      else
	core = build_fold_addr_expr (core);

      if (TREE_CODE (core) == ADDR_EXPR)
	aff_combination_add_elt (comb, core, 1);
      else
	{
	  tree_to_aff_combination (core, type, &tmp);
	  aff_combination_add (comb, &tmp);
	}
      if (toffset)
	{
	  tree_to_aff_combination (toffset, type, &tmp);
	  aff_combination_add (comb, &tmp);
	}
      return;

    case MEM_REF:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR)
	tree_to_aff_combination (TREE_OPERAND (TREE_OPERAND (expr, 0), 0),
				 type, comb);
      else if (integer_zerop (TREE_OPERAND (expr, 1)))
	{
	  aff_combination_elt (comb, type, expr);
	  return;
	}
      else
	aff_combination_elt (comb, type,
			     build2 (MEM_REF, TREE_TYPE (expr),
				     TREE_OPERAND (expr, 0),
				     build_int_cst
				       (TREE_TYPE (TREE_OPERAND (expr, 1)), 0)));
      tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
      aff_combination_add (comb, &tmp);
      return;

    default:
      break;
    }

  aff_combination_elt (comb, type, expr);
}
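/* Illustrative sketch: for a POINTER_PLUS_EXPR such as

     p + (sizetype) i * 4

   the recursion above yields, roughly, an aff_tree with offset == 0,
   elts[0] == { p, 1 } and elts[1] == { i, 4 }.  Any subexpression the
   switch does not understand becomes a single element with coefficient 1,
   so the decomposition is always exact, just possibly coarse.  */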
/* Creates EXPR + ELT * SCALE in TYPE.  EXPR is taken from affine
   combination COMB.  */

static tree
add_elt_to_tree (tree expr, tree type, tree elt, const widest_int &scale_in,
		 aff_tree *comb ATTRIBUTE_UNUSED)
{
  enum tree_code code;
  tree type1 = type;
  if (POINTER_TYPE_P (type))
    type1 = sizetype;

  widest_int scale = wide_int_ext_for_comb (scale_in, comb);

  if (scale == -1
      && POINTER_TYPE_P (TREE_TYPE (elt)))
    {
      elt = convert_to_ptrofftype (elt);
      elt = fold_build1 (NEGATE_EXPR, TREE_TYPE (elt), elt);
      scale = 1;
    }

  if (scale == 1)
    {
      if (!expr)
	{
	  if (POINTER_TYPE_P (TREE_TYPE (elt)))
	    return elt;
	  else
	    return fold_convert (type1, elt);
	}

      if (POINTER_TYPE_P (TREE_TYPE (expr)))
	return fold_build_pointer_plus (expr, elt);
      if (POINTER_TYPE_P (TREE_TYPE (elt)))
	return fold_build_pointer_plus (elt, expr);
      return fold_build2 (PLUS_EXPR, type1,
			  expr, fold_convert (type1, elt));
    }

  if (scale == -1)
    {
      if (!expr)
	return fold_build1 (NEGATE_EXPR, type1,
			    fold_convert (type1, elt));

      if (POINTER_TYPE_P (TREE_TYPE (expr)))
	{
	  elt = convert_to_ptrofftype (elt);
	  elt = fold_build1 (NEGATE_EXPR, TREE_TYPE (elt), elt);
	  return fold_build_pointer_plus (expr, elt);
	}
      return fold_build2 (MINUS_EXPR, type1,
			  expr, fold_convert (type1, elt));
    }

  elt = fold_convert (type1, elt);
  if (!expr)
    return fold_build2 (MULT_EXPR, type1, elt,
			wide_int_to_tree (type1, scale));

  if (wi::neg_p (scale))
    {
      code = MINUS_EXPR;
      scale = -scale;
    }
  else
    code = PLUS_EXPR;

  elt = fold_build2 (MULT_EXPR, type1, elt,
		     wide_int_to_tree (type1, scale));
  if (POINTER_TYPE_P (TREE_TYPE (expr)))
    {
      if (code == MINUS_EXPR)
	elt = fold_build1 (NEGATE_EXPR, type1, elt);
      return fold_build_pointer_plus (expr, elt);
    }
  return fold_build2 (code, type1, expr, elt);
}
/* Makes tree from the affine combination COMB.  */

tree
aff_combination_to_tree (aff_tree *comb)
{
  tree type = comb->type;
  tree expr = NULL_TREE;
  unsigned i;
  widest_int off, sgn;
  tree type1 = type;
  if (POINTER_TYPE_P (type))
    type1 = sizetype;

  gcc_assert (comb->n == MAX_AFF_ELTS || comb->rest == NULL_TREE);

  for (i = 0; i < comb->n; i++)
    expr = add_elt_to_tree (expr, type, comb->elts[i].val, comb->elts[i].coef,
			    comb);

  if (comb->rest)
    expr = add_elt_to_tree (expr, type, comb->rest, 1, comb);

  /* Ensure that we get x - 1, not x + (-1) or x + 0xff..f if x is
     unsigned.  */
  if (wi::neg_p (comb->offset))
    {
      off = -comb->offset;
      sgn = -1;
    }
  else
    {
      off = comb->offset;
      sgn = 1;
    }
  return add_elt_to_tree (expr, type, wide_int_to_tree (type1, off), sgn,
			  comb);
}
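/* Illustrative note: tree_to_aff_combination and aff_combination_to_tree
   are, up to folding, inverse to each other:

     aff_tree c;
     tree_to_aff_combination (expr, type, &c);
     tree back = aff_combination_to_tree (&c);  // computes the same value

   Because a negative offset is emitted through a negative scale (the
   OFF/SGN handling above), an unsigned X with offset -1 comes back as
   "x - 1" rather than "x + 0xff..f".  */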
/* Copies the tree elements of COMB to ensure that they are not shared.  */

void
unshare_aff_combination (aff_tree *comb)
{
  unsigned i;

  for (i = 0; i < comb->n; i++)
    comb->elts[i].val = unshare_expr (comb->elts[i].val);
  if (comb->rest)
    comb->rest = unshare_expr (comb->rest);
}
/* Remove M-th element from COMB.  */

void
aff_combination_remove_elt (aff_tree *comb, unsigned m)
{
  comb->n--;
  if (m <= comb->n)
    comb->elts[m] = comb->elts[comb->n];
  if (comb->rest)
    {
      comb->elts[comb->n].coef = 1;
      comb->elts[comb->n].val = comb->rest;
      comb->rest = NULL_TREE;
      comb->n++;
    }
}
/* Adds C * COEF * VAL to R.  VAL may be NULL, in that case only
   C * COEF is added to R.  */

static void
aff_combination_add_product (aff_tree *c, const widest_int &coef, tree val,
			     aff_tree *r)
{
  unsigned i;
  tree aval, type;

  for (i = 0; i < c->n; i++)
    {
      aval = c->elts[i].val;
      if (val)
	{
	  type = TREE_TYPE (aval);
	  aval = fold_build2 (MULT_EXPR, type, aval,
			      fold_convert (type, val));
	}

      aff_combination_add_elt (r, aval, coef * c->elts[i].coef);
    }

  if (c->rest)
    {
      aval = c->rest;
      if (val)
	{
	  type = TREE_TYPE (aval);
	  aval = fold_build2 (MULT_EXPR, type, aval,
			      fold_convert (type, val));
	}

      aff_combination_add_elt (r, aval, coef);
    }

  if (val)
    aff_combination_add_elt (r, val, coef * c->offset);
  else
    aff_combination_add_cst (r, coef * c->offset);
}
/* Multiplies C1 by C2, storing the result to R.  */

void
aff_combination_mult (aff_tree *c1, aff_tree *c2, aff_tree *r)
{
  unsigned i;
  gcc_assert (TYPE_PRECISION (c1->type) == TYPE_PRECISION (c2->type));

  aff_combination_zero (r, c1->type);

  for (i = 0; i < c2->n; i++)
    aff_combination_add_product (c1, c2->elts[i].coef, c2->elts[i].val, r);
  if (c2->rest)
    aff_combination_add_product (c1, 1, c2->rest, r);
  aff_combination_add_product (c1, c2->offset, NULL, r);
}
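/* Illustrative sketch: multiplying c1 = i + 1 by c2 = j + 2 gives

     aff_tree r;
     aff_combination_mult (&c1, &c2, &r);
     // r == 1*(i*j) + 2*i + 1*j + 2

   where the i*j term is a freshly built MULT_EXPR tree used as an
   ordinary element with coefficient 1.  */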
/* Returns the element of COMB whose value is VAL, or NULL if no such
   element exists.  If IDX is not NULL, it is set to the index of VAL in
   COMB.  */

static struct aff_comb_elt *
aff_combination_find_elt (aff_tree *comb, tree val, unsigned *idx)
{
  unsigned i;

  for (i = 0; i < comb->n; i++)
    if (operand_equal_p (comb->elts[i].val, val, 0))
      {
	if (idx)
	  *idx = i;

	return &comb->elts[i];
      }

  return NULL;
}
/* Element of the cache that maps ssa name NAME to its expanded form
   as an affine expression EXPANSION.  */

struct name_expansion
{
  aff_tree expansion;

  /* True if the expansion for the name is just being generated.  */
  unsigned in_progress : 1;
};
/* Expands SSA names in COMB recursively.  CACHE is used to cache the
   results.  */

void
aff_combination_expand (aff_tree *comb ATTRIBUTE_UNUSED,
			hash_map<tree, name_expansion *> **cache)
{
  unsigned i;
  aff_tree to_add, current, curre;
  tree e, rhs;
  gimple def;
  widest_int scale;
  struct name_expansion *exp;

  aff_combination_zero (&to_add, comb->type);
  for (i = 0; i < comb->n; i++)
    {
      tree type, name;
      enum tree_code code;

      e = comb->elts[i].val;
      type = TREE_TYPE (e);
      name = e;
      /* Look through some conversions.  */
      if (CONVERT_EXPR_P (e)
	  && (TYPE_PRECISION (type)
	      >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (e, 0)))))
	name = TREE_OPERAND (e, 0);
      if (TREE_CODE (name) != SSA_NAME)
	continue;
      def = SSA_NAME_DEF_STMT (name);
      if (!is_gimple_assign (def) || gimple_assign_lhs (def) != name)
	continue;

      code = gimple_assign_rhs_code (def);
      if (code != SSA_NAME
	  && !IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
	  && (get_gimple_rhs_class (code) != GIMPLE_SINGLE_RHS
	      || !is_gimple_min_invariant (gimple_assign_rhs1 (def))))
	continue;

      /* We do not know whether the reference retains its value at the
	 place where the expansion is used.  */
      if (TREE_CODE_CLASS (code) == tcc_reference)
	continue;

      if (!*cache)
	*cache = new hash_map<tree, name_expansion *>;
      name_expansion **slot = &(*cache)->get_or_insert (e);
      exp = *slot;

      if (!exp)
	{
	  exp = XNEW (struct name_expansion);
	  exp->in_progress = 1;
	  *slot = exp;
	  /* In principle this is a generally valid folding, but
	     it is not unconditionally an optimization, so do it
	     here and not in fold_unary.  */
	  /* Convert (T1)(X *+- CST) into (T1)X *+- (T1)CST if T1 is wider
	     than the type of X and overflow for the type of X is
	     undefined.  */
	  if (e != name
	      && INTEGRAL_TYPE_P (type)
	      && INTEGRAL_TYPE_P (TREE_TYPE (name))
	      && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (name))
	      && (code == PLUS_EXPR || code == MINUS_EXPR || code == MULT_EXPR)
	      && TREE_CODE (gimple_assign_rhs2 (def)) == INTEGER_CST)
	    rhs = fold_build2 (code, type,
			       fold_convert (type, gimple_assign_rhs1 (def)),
			       fold_convert (type, gimple_assign_rhs2 (def)));
	  else
	    {
	      rhs = gimple_assign_rhs_to_tree (def);
	      if (e != name)
		rhs = fold_convert (type, rhs);
	    }
	  tree_to_aff_combination_expand (rhs, comb->type, &current, cache);
	  exp->expansion = current;
	  exp->in_progress = 0;
	}
      else
	{
	  /* Since we follow the definitions in the SSA form, we should not
	     enter a cycle unless we pass through a phi node.  */
	  gcc_assert (!exp->in_progress);
	  current = exp->expansion;
	}

      /* Accumulate the new terms to TO_ADD, so that we do not modify
	 COMB while traversing it; include the term -coef * E, to remove
	 it from COMB.  */
      scale = comb->elts[i].coef;
      aff_combination_zero (&curre, comb->type);
      aff_combination_add_elt (&curre, e, -scale);
      aff_combination_scale (&current, scale);
      aff_combination_add (&to_add, &current);
      aff_combination_add (&to_add, &curre);
    }
  aff_combination_add (comb, &to_add);
}
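/* Illustrative sketch: given the GIMPLE assignment a_1 = b_2 + 5 and a
   combination equal to 2*a_1 + 1, expansion substitutes the definition of
   a_1 and leaves 2*b_2 + 11.  The -SCALE * E term accumulated through
   CURRE is what cancels the original 2*a_1 element out of COMB.  */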
/* Similar to tree_to_aff_combination, but follows SSA name definitions
   and expands them recursively.  CACHE is used to cache the expansions
   of the ssa names, to avoid exponential time complexity for cases
   like

   a1 = a0 + a0;
   a2 = a1 + a1;
   a3 = a2 + a2;
   ...  */

void
tree_to_aff_combination_expand (tree expr, tree type, aff_tree *comb,
				hash_map<tree, name_expansion *> **cache)
{
  tree_to_aff_combination (expr, type, comb);
  aff_combination_expand (comb, cache);
}
/* Frees memory occupied by struct name_expansion in *VALUE.  Callback for
   hash_map::traverse.  */

bool
free_name_expansion (tree const &, name_expansion **value, void *)
{
  free (*value);
  return true;
}
/* Frees memory allocated for the CACHE used by
   tree_to_aff_combination_expand.  */

void
free_affine_expand_cache (hash_map<tree, name_expansion *> **cache)
{
  if (!*cache)
    return;

  (*cache)->traverse<void *, free_name_expansion> (NULL);
  delete *cache;
  *cache = NULL;
}
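/* Illustrative usage sketch (E1, E2 and TYPE stand for arbitrary trees):
   callers keep one cache across several expansions and release it once at
   the end, e.g.

     hash_map<tree, name_expansion *> *cache = NULL;
     aff_tree c1, c2;
     tree_to_aff_combination_expand (e1, type, &c1, &cache);
     tree_to_aff_combination_expand (e2, type, &c2, &cache);
     free_affine_expand_cache (&cache);

   The cache is allocated lazily on first use, so initializing the pointer
   to NULL is the only setup required.  */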
/* If VAL != CST * DIV for any constant CST, returns false.
   Otherwise, if *MULT_SET is true, additionally compares CST and MULT,
   and if they are different, returns false.  Finally, if neither of these
   two cases occur, true is returned, and CST is stored to MULT and MULT_SET
   is set to true.  */

static bool
wide_int_constant_multiple_p (const widest_int &val, const widest_int &div,
			      bool *mult_set, widest_int *mult)
{
  widest_int cst;

  if (val == 0)
    {
      if (*mult_set && *mult != 0)
	return false;
      *mult_set = true;
      *mult = 0;
      return true;
    }

  if (div == 0)
    return false;

  if (!wi::multiple_of_p (val, div, SIGNED, &cst))
    return false;

  if (*mult_set && *mult != cst)
    return false;

  *mult_set = true;
  *mult = cst;
  return true;
}
/* Returns true if VAL = X * DIV for some constant X.  If this is the case,
   X is stored to MULT.  */

bool
aff_combination_constant_multiple_p (aff_tree *val, aff_tree *div,
				     widest_int *mult)
{
  bool mult_set = false;
  unsigned i;

  if (val->n == 0 && val->offset == 0)
    {
      *mult = 0;
      return true;
    }
  if (val->n != div->n)
    return false;

  if (val->rest || div->rest)
    return false;

  if (!wide_int_constant_multiple_p (val->offset, div->offset,
				     &mult_set, mult))
    return false;

  for (i = 0; i < div->n; i++)
    {
      struct aff_comb_elt *elt
	= aff_combination_find_elt (val, div->elts[i].val, NULL);
      if (!elt)
	return false;
      if (!wide_int_constant_multiple_p (elt->coef, div->elts[i].coef,
					 &mult_set, mult))
	return false;
    }

  gcc_assert (mult_set);
  return true;
}
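/* Illustrative example: with VAL = 8*i + 24 and DIV = 2*i + 6 the function
   returns true and stores 4 to *MULT, since the offsets (24 == 4*6) and
   the matching coefficients (8 == 4*2) agree on the same multiplier.  If
   any pair disagreed, or if either combination had a "rest" part, the
   result would be false.  */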
/* Prints the affine VAL to the FILE.  */

static void
print_aff (FILE *file, aff_tree *val)
{
  unsigned i;
  signop sgn = TYPE_SIGN (val->type);
  if (POINTER_TYPE_P (val->type))
    sgn = SIGNED;
  fprintf (file, "{\n  type = ");
  print_generic_expr (file, val->type, TDF_VOPS|TDF_MEMSYMS);
  fprintf (file, "\n  offset = ");
  print_dec (val->offset, file, sgn);
  if (val->n > 0)
    {
      fprintf (file, "\n  elements = {\n");
      for (i = 0; i < val->n; i++)
	{
	  fprintf (file, "    [%d] = ", i);
	  print_generic_expr (file, val->elts[i].val, TDF_VOPS|TDF_MEMSYMS);

	  fprintf (file, " * ");
	  print_dec (val->elts[i].coef, file, sgn);
	  if (i != val->n - 1)
	    fprintf (file, ", \n");
	}
      fprintf (file, "\n  }");
    }
  if (val->rest)
    {
      fprintf (file, "\n  rest = ");
      print_generic_expr (file, val->rest, TDF_VOPS|TDF_MEMSYMS);
    }
  fprintf (file, "\n}");
}
/* Prints the affine VAL to the standard error, used for debugging.  */

DEBUG_FUNCTION void
debug_aff (aff_tree *val)
{
  print_aff (stderr, val);
  fprintf (stderr, "\n");
}
/* Computes address of the reference REF in ADDR.  The size of the accessed
   location is stored to SIZE.  Returns the ultimate containing object to
   which REF refers.  */

tree
get_inner_reference_aff (tree ref, aff_tree *addr, widest_int *size)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree toff;
  machine_mode mode;
  int uns, vol;
  aff_tree tmp;
  tree base = get_inner_reference (ref, &bitsize, &bitpos, &toff, &mode,
				   &uns, &vol, false);
  tree base_addr = build_fold_addr_expr (base);

  /* ADDR = &BASE + TOFF + BITPOS / BITS_PER_UNIT.  */

  tree_to_aff_combination (base_addr, sizetype, addr);

  if (toff)
    {
      tree_to_aff_combination (toff, sizetype, &tmp);
      aff_combination_add (addr, &tmp);
    }

  aff_combination_const (&tmp, sizetype, bitpos / BITS_PER_UNIT);
  aff_combination_add (addr, &tmp);

  *size = (bitsize + BITS_PER_UNIT - 1) / BITS_PER_UNIT;

  return base;
}
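/* Illustrative example: for a COMPONENT_REF such as S.F, where field F
   sits at byte offset 8 and occupies 4 bytes, the resulting combination
   is roughly &S + 8 (offset 8 plus one element { &S, 1 }) and *SIZE is
   set to 4.  Bit-field sizes are rounded up to whole bytes by the final
   division above.  */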
/* Returns true if a region of size SIZE1 at position 0 and a region of
   size SIZE2 at position DIFF cannot overlap.  */

bool
aff_comb_cannot_overlap_p (aff_tree *diff, const widest_int &size1,
			   const widest_int &size2)
{
  /* Unless the difference is a constant, we fail.  */
  if (diff->n != 0)
    return false;

  if (wi::neg_p (diff->offset))
    {
      /* The second object is before the first one, we succeed if the last
	 element of the second object is before the start of the first one.  */
      return wi::neg_p (diff->offset + size2 - 1);
    }
  else
    {
      /* We succeed if the second object starts after the first one ends.  */
      return wi::les_p (size1, diff->offset);
    }
}
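/* Illustrative example: with SIZE1 == 4, SIZE2 == 8 and a constant
   difference of 4, the regions [0, 4) and [4, 12) touch but do not
   overlap, so the function returns true (4 <= 4).  A difference of -8
   also gives true, because -8 + 8 - 1 == -1 is still negative, whereas a
   difference of 2 would give false.  */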