/* Operations with affine combinations of trees.
   Copyright (C) 2005-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "fold-const.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "tree-pretty-print.h"
#include "tree-affine.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "gimple.h"
#include "cfgexpand.h"
#include "wide-int-print.h"

/* Extends CST as appropriate for the affine combination COMB.  */

static widest_int
wide_int_ext_for_comb (const widest_int &cst, aff_tree *comb)
{
  return wi::sext (cst, TYPE_PRECISION (comb->type));
}
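
/* For example, for a combination whose type has 32-bit precision,
   wide_int_ext_for_comb sign-extends the constant 0xffffffff to -1, so
   all offset and coefficient arithmetic stays canonical at the
   combination's precision.  */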

/* Initializes affine combination COMB so that its value is zero in TYPE.  */

void
aff_combination_zero (aff_tree *comb, tree type)
{
  unsigned i;

  comb->type = type;
  comb->offset = 0;
  comb->n = 0;
  for (i = 0; i < MAX_AFF_ELTS; i++)
    comb->elts[i].coef = 0;
  comb->rest = NULL_TREE;
}

/* Sets COMB to CST.  */

void
aff_combination_const (aff_tree *comb, tree type, const widest_int &cst)
{
  aff_combination_zero (comb, type);
  comb->offset = wide_int_ext_for_comb (cst, comb);
}

/* Sets COMB to single element ELT.  */

void
aff_combination_elt (aff_tree *comb, tree type, tree elt)
{
  aff_combination_zero (comb, type);

  comb->n = 1;
  comb->elts[0].val = elt;
  comb->elts[0].coef = 1;
}

/* Scales COMB by SCALE.  */

void
aff_combination_scale (aff_tree *comb, const widest_int &scale_in)
{
  unsigned i, j;

  widest_int scale = wide_int_ext_for_comb (scale_in, comb);
  if (scale == 1)
    return;

  if (scale == 0)
    {
      aff_combination_zero (comb, comb->type);
      return;
    }

  comb->offset = wide_int_ext_for_comb (scale * comb->offset, comb);
  for (i = 0, j = 0; i < comb->n; i++)
    {
      widest_int new_coef
        = wide_int_ext_for_comb (scale * comb->elts[i].coef, comb);
      /* A coefficient may become zero due to overflow.  Remove the zero
         elements.  */
      if (new_coef == 0)
        continue;
      comb->elts[j].coef = new_coef;
      comb->elts[j].val = comb->elts[i].val;
      j++;
    }
  comb->n = j;

  if (comb->rest)
    {
      tree type = comb->type;
      if (POINTER_TYPE_P (type))
        type = sizetype;
      if (comb->n < MAX_AFF_ELTS)
        {
          comb->elts[comb->n].coef = scale;
          comb->elts[comb->n].val = comb->rest;
          comb->rest = NULL_TREE;
          comb->n++;
        }
      else
        comb->rest = fold_build2 (MULT_EXPR, type, comb->rest,
                                  wide_int_to_tree (type, scale));
    }
}
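
/* For instance, aff_combination_scale turns {offset = 3, elts = {2 * x}}
   scaled by 5 into {offset = 15, elts = {10 * x}}, while scaling by 0
   resets the whole combination to zero.  */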

/* Adds ELT * SCALE to COMB.  */

void
aff_combination_add_elt (aff_tree *comb, tree elt, const widest_int &scale_in)
{
  unsigned i;
  tree type;

  widest_int scale = wide_int_ext_for_comb (scale_in, comb);
  if (scale == 0)
    return;

  for (i = 0; i < comb->n; i++)
    if (operand_equal_p (comb->elts[i].val, elt, 0))
      {
        widest_int new_coef
          = wide_int_ext_for_comb (comb->elts[i].coef + scale, comb);
        if (new_coef != 0)
          {
            comb->elts[i].coef = new_coef;
            return;
          }

        comb->n--;
        comb->elts[i] = comb->elts[comb->n];

        /* If rest contains a meaningful value, we can move it to the
           freed position.  */
        if (comb->rest)
          {
            gcc_assert (comb->n == MAX_AFF_ELTS - 1);
            comb->elts[comb->n].coef = 1;
            comb->elts[comb->n].val = comb->rest;
            comb->rest = NULL_TREE;
            comb->n++;
          }
        return;
      }
  if (comb->n < MAX_AFF_ELTS)
    {
      comb->elts[comb->n].coef = scale;
      comb->elts[comb->n].val = elt;
      comb->n++;
      return;
    }

  type = comb->type;
  if (POINTER_TYPE_P (type))
    type = sizetype;

  if (scale == 1)
    elt = fold_convert (type, elt);
  else
    elt = fold_build2 (MULT_EXPR, type,
                       fold_convert (type, elt),
                       wide_int_to_tree (type, scale));

  if (comb->rest)
    comb->rest = fold_build2 (PLUS_EXPR, type, comb->rest,
                              elt);
  else
    comb->rest = elt;
}
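
/* For example, if COMB already contains the element 3 * x, then
   aff_combination_add_elt (comb, x, -3) cancels that element entirely,
   and the freed slot is refilled from COMB->rest when possible.  */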

/* Adds CST to C.  */

static void
aff_combination_add_cst (aff_tree *c, const widest_int &cst)
{
  c->offset = wide_int_ext_for_comb (c->offset + cst, c);
}

/* Adds COMB2 to COMB1.  */

void
aff_combination_add (aff_tree *comb1, aff_tree *comb2)
{
  unsigned i;

  aff_combination_add_cst (comb1, comb2->offset);
  for (i = 0; i < comb2->n; i++)
    aff_combination_add_elt (comb1, comb2->elts[i].val, comb2->elts[i].coef);
  if (comb2->rest)
    aff_combination_add_elt (comb1, comb2->rest, 1);
}

/* Converts affine combination COMB to TYPE.  */

void
aff_combination_convert (aff_tree *comb, tree type)
{
  unsigned i, j;
  tree comb_type = comb->type;

  if (TYPE_PRECISION (type) > TYPE_PRECISION (comb_type))
    {
      tree val = fold_convert (type, aff_combination_to_tree (comb));
      tree_to_aff_combination (val, type, comb);
      return;
    }

  comb->type = type;
  if (comb->rest && !POINTER_TYPE_P (type))
    comb->rest = fold_convert (type, comb->rest);

  if (TYPE_PRECISION (type) == TYPE_PRECISION (comb_type))
    return;

  comb->offset = wide_int_ext_for_comb (comb->offset, comb);
  for (i = j = 0; i < comb->n; i++)
    {
      if (comb->elts[i].coef == 0)
        continue;
      comb->elts[j].coef = comb->elts[i].coef;
      comb->elts[j].val = fold_convert (type, comb->elts[i].val);
      j++;
    }

  comb->n = j;
  if (comb->n < MAX_AFF_ELTS && comb->rest)
    {
      comb->elts[comb->n].coef = 1;
      comb->elts[comb->n].val = comb->rest;
      comb->rest = NULL_TREE;
      comb->n++;
    }
}
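
/* Note that when aff_combination_convert narrows, the offset and the
   coefficients are re-extended at the new precision by
   wide_int_ext_for_comb; e.g. a coefficient of 255 in an 8-bit type is
   canonicalized to -1.  */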

/* Splits EXPR into an affine combination of parts.  */

void
tree_to_aff_combination (tree expr, tree type, aff_tree *comb)
{
  aff_tree tmp;
  enum tree_code code;
  tree cst, core, toffset;
  HOST_WIDE_INT bitpos, bitsize;
  machine_mode mode;
  int unsignedp, volatilep;

  STRIP_NOPS (expr);

  code = TREE_CODE (expr);
  switch (code)
    {
    case INTEGER_CST:
      aff_combination_const (comb, type, wi::to_widest (expr));
      return;

    case POINTER_PLUS_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
      aff_combination_add (comb, &tmp);
      return;

    case PLUS_EXPR:
    case MINUS_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), type, &tmp);
      if (code == MINUS_EXPR)
        aff_combination_scale (&tmp, -1);
      aff_combination_add (comb, &tmp);
      return;

    case MULT_EXPR:
      cst = TREE_OPERAND (expr, 1);
      if (TREE_CODE (cst) != INTEGER_CST)
        break;
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, wi::to_widest (cst));
      return;

    case NEGATE_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, -1);
      return;

    case BIT_NOT_EXPR:
      /* ~x = -x - 1  */
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, -1);
      aff_combination_add_cst (comb, -1);
      return;

    case ADDR_EXPR:
      /* Handle &MEM[ptr + CST] which is equivalent to POINTER_PLUS_EXPR.  */
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == MEM_REF)
        {
          expr = TREE_OPERAND (expr, 0);
          tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
          tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
          aff_combination_add (comb, &tmp);
          return;
        }
      core = get_inner_reference (TREE_OPERAND (expr, 0), &bitsize, &bitpos,
                                  &toffset, &mode, &unsignedp, &volatilep,
                                  false);
      if (bitpos % BITS_PER_UNIT != 0)
        break;
      aff_combination_const (comb, type, bitpos / BITS_PER_UNIT);
      if (TREE_CODE (core) == MEM_REF)
        {
          aff_combination_add_cst (comb, wi::to_widest (TREE_OPERAND (core, 1)));
          core = TREE_OPERAND (core, 0);
        }
      else
        core = build_fold_addr_expr (core);

      if (TREE_CODE (core) == ADDR_EXPR)
        aff_combination_add_elt (comb, core, 1);
      else
        {
          tree_to_aff_combination (core, type, &tmp);
          aff_combination_add (comb, &tmp);
        }
      if (toffset)
        {
          tree_to_aff_combination (toffset, type, &tmp);
          aff_combination_add (comb, &tmp);
        }
      return;

    case MEM_REF:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR)
        tree_to_aff_combination (TREE_OPERAND (TREE_OPERAND (expr, 0), 0),
                                 type, comb);
      else if (integer_zerop (TREE_OPERAND (expr, 1)))
        {
          aff_combination_elt (comb, type, expr);
          return;
        }
      else
        aff_combination_elt (comb, type,
                             build2 (MEM_REF, TREE_TYPE (expr),
                                     TREE_OPERAND (expr, 0),
                                     build_int_cst
                                       (TREE_TYPE (TREE_OPERAND (expr, 1)), 0)));
      tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
      aff_combination_add (comb, &tmp);
      return;

    default:
      break;
    }

  aff_combination_elt (comb, type, expr);
}
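
/* As a worked example, assuming a 4-byte element type for illustration,
   tree_to_aff_combination splits &a[i + 4] into the combination
   1 * &a + 4 * i + 16: the base address becomes an element with
   coefficient 1, the scaled index an element with coefficient 4, and the
   constant part goes to the offset.  */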

/* Creates EXPR + ELT * SCALE in TYPE.  EXPR is taken from affine
   combination COMB.  */

static tree
add_elt_to_tree (tree expr, tree type, tree elt, const widest_int &scale_in,
                 aff_tree *comb ATTRIBUTE_UNUSED)
{
  enum tree_code code;
  tree type1 = type;
  if (POINTER_TYPE_P (type))
    type1 = sizetype;

  widest_int scale = wide_int_ext_for_comb (scale_in, comb);

  if (scale == -1
      && POINTER_TYPE_P (TREE_TYPE (elt)))
    {
      elt = convert_to_ptrofftype (elt);
      elt = fold_build1 (NEGATE_EXPR, TREE_TYPE (elt), elt);
      scale = 1;
    }

  if (scale == 1)
    {
      if (!expr)
        {
          if (POINTER_TYPE_P (TREE_TYPE (elt)))
            return elt;
          else
            return fold_convert (type1, elt);
        }

      if (POINTER_TYPE_P (TREE_TYPE (expr)))
        return fold_build_pointer_plus (expr, elt);
      if (POINTER_TYPE_P (TREE_TYPE (elt)))
        return fold_build_pointer_plus (elt, expr);
      return fold_build2 (PLUS_EXPR, type1,
                          expr, fold_convert (type1, elt));
    }

  if (scale == -1)
    {
      if (!expr)
        return fold_build1 (NEGATE_EXPR, type1,
                            fold_convert (type1, elt));

      if (POINTER_TYPE_P (TREE_TYPE (expr)))
        {
          elt = convert_to_ptrofftype (elt);
          elt = fold_build1 (NEGATE_EXPR, TREE_TYPE (elt), elt);
          return fold_build_pointer_plus (expr, elt);
        }
      return fold_build2 (MINUS_EXPR, type1,
                          expr, fold_convert (type1, elt));
    }

  elt = fold_convert (type1, elt);
  if (!expr)
    return fold_build2 (MULT_EXPR, type1, elt,
                        wide_int_to_tree (type1, scale));

  if (wi::neg_p (scale))
    {
      code = MINUS_EXPR;
      scale = -scale;
    }
  else
    code = PLUS_EXPR;

  elt = fold_build2 (MULT_EXPR, type1, elt,
                     wide_int_to_tree (type1, scale));
  if (POINTER_TYPE_P (TREE_TYPE (expr)))
    {
      if (code == MINUS_EXPR)
        elt = fold_build1 (NEGATE_EXPR, type1, elt);
      return fold_build_pointer_plus (expr, elt);
    }
  return fold_build2 (code, type1, expr, elt);
}

/* Makes tree from the affine combination COMB.  */

tree
aff_combination_to_tree (aff_tree *comb)
{
  tree type = comb->type;
  tree expr = NULL_TREE;
  unsigned i;
  widest_int off, sgn;
  tree type1 = type;
  if (POINTER_TYPE_P (type))
    type1 = sizetype;

  gcc_assert (comb->n == MAX_AFF_ELTS || comb->rest == NULL_TREE);

  for (i = 0; i < comb->n; i++)
    expr = add_elt_to_tree (expr, type, comb->elts[i].val, comb->elts[i].coef,
                            comb);

  if (comb->rest)
    expr = add_elt_to_tree (expr, type, comb->rest, 1, comb);

  /* Ensure that we get x - 1, not x + (-1) or x + 0xff..f if x is
     unsigned.  */
  if (wi::neg_p (comb->offset))
    {
      off = -comb->offset;
      sgn = -1;
    }
  else
    {
      off = comb->offset;
      sgn = 1;
    }
  return add_elt_to_tree (expr, type, wide_int_to_tree (type1, off), sgn,
                          comb);
}
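
/* The final add_elt_to_tree call above is what produces x - 1 rather than
   x + 0xff..f for a negative offset in an unsigned type: the offset is
   negated up front and then re-added with scale -1.  */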

/* Copies the tree elements of COMB to ensure that they are not shared.  */

void
unshare_aff_combination (aff_tree *comb)
{
  unsigned i;

  for (i = 0; i < comb->n; i++)
    comb->elts[i].val = unshare_expr (comb->elts[i].val);
  if (comb->rest)
    comb->rest = unshare_expr (comb->rest);
}

/* Remove M-th element from COMB.  */

void
aff_combination_remove_elt (aff_tree *comb, unsigned m)
{
  comb->n--;
  if (m <= comb->n)
    comb->elts[m] = comb->elts[comb->n];
  if (comb->rest)
    {
      comb->elts[comb->n].coef = 1;
      comb->elts[comb->n].val = comb->rest;
      comb->rest = NULL_TREE;
      comb->n++;
    }
}

/* Adds C * COEF * VAL to R.  VAL may be NULL, in that case only
   C * COEF is added to R.  */

static void
aff_combination_add_product (aff_tree *c, const widest_int &coef, tree val,
                             aff_tree *r)
{
  unsigned i;
  tree aval, type;

  for (i = 0; i < c->n; i++)
    {
      aval = c->elts[i].val;
      if (val)
        {
          type = TREE_TYPE (aval);
          aval = fold_build2 (MULT_EXPR, type, aval,
                              fold_convert (type, val));
        }

      aff_combination_add_elt (r, aval, coef * c->elts[i].coef);
    }

  if (c->rest)
    {
      aval = c->rest;
      if (val)
        {
          type = TREE_TYPE (aval);
          aval = fold_build2 (MULT_EXPR, type, aval,
                              fold_convert (type, val));
        }

      aff_combination_add_elt (r, aval, coef);
    }

  if (val)
    aff_combination_add_elt (r, val, coef * c->offset);
  else
    aff_combination_add_cst (r, coef * c->offset);
}

/* Multiplies C1 by C2, storing the result to R.  */

void
aff_combination_mult (aff_tree *c1, aff_tree *c2, aff_tree *r)
{
  unsigned i;

  gcc_assert (TYPE_PRECISION (c1->type) == TYPE_PRECISION (c2->type));

  aff_combination_zero (r, c1->type);

  for (i = 0; i < c2->n; i++)
    aff_combination_add_product (c1, c2->elts[i].coef, c2->elts[i].val, r);
  if (c2->rest)
    aff_combination_add_product (c1, 1, c2->rest, r);
  aff_combination_add_product (c1, c2->offset, NULL, r);
}
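
/* E.g. aff_combination_mult of {2 * x + 3} and {y + 1} yields
   {2 * (x * y) + 2 * x + 3 * y + 3}; the cross term x * y becomes a new
   element whose value is a MULT_EXPR tree.  */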

/* Returns the element of COMB whose value is VAL, or NULL if no such
   element exists.  If IDX is not NULL, it is set to the index of VAL in
   COMB.  */

static struct aff_comb_elt *
aff_combination_find_elt (aff_tree *comb, tree val, unsigned *idx)
{
  unsigned i;

  for (i = 0; i < comb->n; i++)
    if (operand_equal_p (comb->elts[i].val, val, 0))
      {
        if (idx)
          *idx = i;

        return &comb->elts[i];
      }

  return NULL;
}

/* Element of the cache that maps ssa name NAME to its expanded form
   as an affine expression EXPANSION.  */

struct name_expansion
{
  aff_tree expansion;

  /* True if the expansion for the name is just being generated.  */
  unsigned in_progress : 1;
};

/* Expands SSA names in COMB recursively.  CACHE is used to cache the
   results.  */

void
aff_combination_expand (aff_tree *comb ATTRIBUTE_UNUSED,
                        hash_map<tree, name_expansion *> **cache)
{
  unsigned i;
  aff_tree to_add, current, curre;
  tree e, rhs;
  gimple def;
  widest_int scale;
  struct name_expansion *exp;

  aff_combination_zero (&to_add, comb->type);
  for (i = 0; i < comb->n; i++)
    {
      tree type, name;
      enum tree_code code;

      e = comb->elts[i].val;
      type = TREE_TYPE (e);
      name = e;
      /* Look through some conversions.  */
      if (CONVERT_EXPR_P (e)
          && (TYPE_PRECISION (type)
              >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (e, 0)))))
        name = TREE_OPERAND (e, 0);
      if (TREE_CODE (name) != SSA_NAME)
        continue;
      def = SSA_NAME_DEF_STMT (name);
      if (!is_gimple_assign (def) || gimple_assign_lhs (def) != name)
        continue;

      code = gimple_assign_rhs_code (def);
      if (code != SSA_NAME
          && !IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
          && (get_gimple_rhs_class (code) != GIMPLE_SINGLE_RHS
              || !is_gimple_min_invariant (gimple_assign_rhs1 (def))))
        continue;

      /* We do not know whether the reference retains its value at the
         place where the expansion is used.  */
      if (TREE_CODE_CLASS (code) == tcc_reference)
        continue;

      if (!*cache)
        *cache = new hash_map<tree, name_expansion *>;
      name_expansion **slot = &(*cache)->get_or_insert (e);
      exp = *slot;

      if (!exp)
        {
          exp = XNEW (struct name_expansion);
          exp->in_progress = 1;
          *slot = exp;
          /* In principle this is a generally valid folding, but
             it is not unconditionally an optimization, so do it
             here and not in fold_unary.  */
          /* Convert (T1)(X *+- CST) into (T1)X *+- (T1)CST if T1 is wider
             than the type of X and overflow for the type of X is
             undefined.  */
          if (e != name
              && INTEGRAL_TYPE_P (type)
              && INTEGRAL_TYPE_P (TREE_TYPE (name))
              && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (name))
              && TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (name))
              && (code == PLUS_EXPR || code == MINUS_EXPR || code == MULT_EXPR)
              && TREE_CODE (gimple_assign_rhs2 (def)) == INTEGER_CST)
            rhs = fold_build2 (code, type,
                               fold_convert (type, gimple_assign_rhs1 (def)),
                               fold_convert (type, gimple_assign_rhs2 (def)));
          else
            {
              rhs = gimple_assign_rhs_to_tree (def);
              if (e != name)
                rhs = fold_convert (type, rhs);
            }
          tree_to_aff_combination_expand (rhs, comb->type, &current, cache);
          exp->expansion = current;
          exp->in_progress = 0;
        }
      else
        {
          /* Since we follow the definitions in the SSA form, we should not
             enter a cycle unless we pass through a phi node.  */
          gcc_assert (!exp->in_progress);
          current = exp->expansion;
        }

      /* Accumulate the new terms to TO_ADD, so that we do not modify
         COMB while traversing it; include the term -coef * E, to remove
         it from COMB.  */
      scale = comb->elts[i].coef;
      aff_combination_zero (&curre, comb->type);
      aff_combination_add_elt (&curre, e, -scale);
      aff_combination_scale (&current, scale);
      aff_combination_add (&to_add, &current);
      aff_combination_add (&to_add, &curre);
    }
  aff_combination_add (comb, &to_add);
}
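
/* For instance, if the combination contains SSA name a_2 with coefficient
   4 and a_2 is defined by a_2 = b_1 + 1, expansion replaces the 4 * a_2
   term with 4 * b_1 and adds 4 to the offset.  */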

/* Similar to tree_to_aff_combination, but follows SSA name definitions
   and expands them recursively.  CACHE is used to cache the expansions
   of the ssa names, to avoid exponential time complexity for cases
   like

   D.1 = a + a;
   D.2 = D.1 + D.1;
   D.3 = D.2 + D.2;
   ...  */

void
tree_to_aff_combination_expand (tree expr, tree type, aff_tree *comb,
                                hash_map<tree, name_expansion *> **cache)
{
  tree_to_aff_combination (expr, type, comb);
  aff_combination_expand (comb, cache);
}

/* Frees memory occupied by struct name_expansion in *VALUE.  Callback for
   hash_map::traverse.  */

static bool
free_name_expansion (tree const &, name_expansion **value, void *)
{
  free (*value);
  return true;
}

/* Frees memory allocated for the CACHE used by
   tree_to_aff_combination_expand.  */

void
free_affine_expand_cache (hash_map<tree, name_expansion *> **cache)
{
  if (!*cache)
    return;

  (*cache)->traverse<void *, free_name_expansion> (NULL);
  delete *cache;
  *cache = NULL;
}

/* If VAL != CST * DIV for any constant CST, returns false.
   Otherwise, if *MULT_SET is true, additionally compares CST and MULT,
   and if they are different, returns false.  Finally, if neither of these
   two cases occur, true is returned, and CST is stored to MULT and MULT_SET
   is set to true.  */

static bool
wide_int_constant_multiple_p (const widest_int &val, const widest_int &div,
                              bool *mult_set, widest_int *mult)
{
  widest_int cst;

  if (val == 0)
    {
      if (*mult_set && *mult != 0)
        return false;
      *mult_set = true;
      *mult = 0;
      return true;
    }

  if (div == 0)
    return false;

  if (!wi::multiple_of_p (val, div, SIGNED, &cst))
    return false;

  if (*mult_set && *mult != cst)
    return false;

  *mult_set = true;
  *mult = cst;
  return true;
}
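
/* Example: a first call with VAL = 12, DIV = 4 records *MULT = 3; a
   subsequent call with VAL = 8, DIV = 2 then fails, because 8 is 4 * DIV
   and 4 does not match the previously recorded multiplier.  */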

/* Returns true if VAL = X * DIV for some constant X.  If this is the case,
   X is stored to MULT.  */

bool
aff_combination_constant_multiple_p (aff_tree *val, aff_tree *div,
                                     widest_int *mult)
{
  bool mult_set = false;
  unsigned i;

  if (val->n == 0 && val->offset == 0)
    {
      *mult = 0;
      return true;
    }
  if (val->n != div->n)
    return false;

  if (val->rest || div->rest)
    return false;

  if (!wide_int_constant_multiple_p (val->offset, div->offset,
                                     &mult_set, mult))
    return false;

  for (i = 0; i < div->n; i++)
    {
      struct aff_comb_elt *elt
        = aff_combination_find_elt (val, div->elts[i].val, NULL);
      if (!elt)
        return false;
      if (!wide_int_constant_multiple_p (elt->coef, div->elts[i].coef,
                                         &mult_set, mult))
        return false;
    }

  gcc_assert (mult_set);
  return true;
}
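
/* E.g. VAL = {offset 8, 4 * x} and DIV = {offset 2, 1 * x} gives X = 4;
   the offsets and every pair of matching coefficients must agree on the
   same constant factor.  */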

/* Prints the affine VAL to the FILE.  */

static void
print_aff (FILE *file, aff_tree *val)
{
  unsigned i;
  signop sgn = TYPE_SIGN (val->type);
  if (POINTER_TYPE_P (val->type))
    sgn = SIGNED;
  fprintf (file, "{\n  type = ");
  print_generic_expr (file, val->type, TDF_VOPS|TDF_MEMSYMS);
  fprintf (file, "\n  offset = ");
  print_dec (val->offset, file, sgn);
  if (val->n > 0)
    {
      fprintf (file, "\n  elements = {\n");
      for (i = 0; i < val->n; i++)
        {
          fprintf (file, "    [%d] = ", i);
          print_generic_expr (file, val->elts[i].val, TDF_VOPS|TDF_MEMSYMS);

          fprintf (file, " * ");
          print_dec (val->elts[i].coef, file, sgn);
          if (i != val->n - 1)
            fprintf (file, ", \n");
        }
      fprintf (file, "\n  }");
    }
  if (val->rest)
    {
      fprintf (file, "\n  rest = ");
      print_generic_expr (file, val->rest, TDF_VOPS|TDF_MEMSYMS);
    }
  fprintf (file, "\n}");
}

/* Prints the affine VAL to the standard error, used for debugging.  */

DEBUG_FUNCTION void
debug_aff (aff_tree *val)
{
  print_aff (stderr, val);
  fprintf (stderr, "\n");
}

/* Computes address of the reference REF in ADDR.  The size of the accessed
   location is stored to SIZE.  Returns the ultimate containing object to
   which REF refers.  */

tree
get_inner_reference_aff (tree ref, aff_tree *addr, widest_int *size)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree toff;
  machine_mode mode;
  int uns, vol;
  aff_tree tmp;
  tree base = get_inner_reference (ref, &bitsize, &bitpos, &toff, &mode,
                                   &uns, &vol, false);
  tree base_addr = build_fold_addr_expr (base);

  /* ADDR = &BASE + TOFF + BITPOS / BITS_PER_UNIT.  */

  tree_to_aff_combination (base_addr, sizetype, addr);

  if (toff)
    {
      tree_to_aff_combination (toff, sizetype, &tmp);
      aff_combination_add (addr, &tmp);
    }

  aff_combination_const (&tmp, sizetype, bitpos / BITS_PER_UNIT);
  aff_combination_add (addr, &tmp);

  *size = (bitsize + BITS_PER_UNIT - 1) / BITS_PER_UNIT;

  return base;
}

/* Returns true if a region of size SIZE1 at position 0 and a region of
   size SIZE2 at position DIFF cannot overlap.  */

bool
aff_comb_cannot_overlap_p (aff_tree *diff, const widest_int &size1,
                           const widest_int &size2)
{
  /* Unless the difference is a constant, we fail.  */
  if (diff->n != 0)
    return false;

  if (wi::neg_p (diff->offset))
    {
      /* The second object is before the first one, we succeed if the last
         element of the second object is before the start of the first one.  */
      return wi::neg_p (diff->offset + size2 - 1);
    }
  else
    {
      /* We succeed if the second object starts after the first one ends.  */
      return wi::les_p (size1, diff->offset);
    }
}