/* Operations with affine combinations of trees.
   Copyright (C) 2005, 2007, 2008, 2010 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "tree-pretty-print.h"
#include "pointer-set.h"
#include "tree-affine.h"
#include "gimple.h"
#include "flags.h"
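
/* An aff_tree COMB represents the value

     COMB.offset + sum_i COMB.elts[i].coef * COMB.elts[i].val + COMB.rest,

   where at most MAX_AFF_ELTS elements are tracked explicitly; whatever
   does not fit is accumulated into COMB.rest (see tree-affine.h for the
   type definition).  */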

/* Extends CST as appropriate for the affine combination COMB.  */

double_int
double_int_ext_for_comb (double_int cst, aff_tree *comb)
{
  return double_int_sext (cst, TYPE_PRECISION (comb->type));
}

/* Initializes affine combination COMB so that its value is zero in TYPE.  */

static void
aff_combination_zero (aff_tree *comb, tree type)
{
  comb->type = type;
  comb->offset = double_int_zero;
  comb->n = 0;
  comb->rest = NULL_TREE;
}

/* Sets COMB to CST.  */

void
aff_combination_const (aff_tree *comb, tree type, double_int cst)
{
  aff_combination_zero (comb, type);
  comb->offset = double_int_ext_for_comb (cst, comb);
}

/* Sets COMB to single element ELT.  */

void
aff_combination_elt (aff_tree *comb, tree type, tree elt)
{
  aff_combination_zero (comb, type);

  comb->n = 1;
  comb->elts[0].val = elt;
  comb->elts[0].coef = double_int_one;
}

/* Scales COMB by SCALE.  */

void
aff_combination_scale (aff_tree *comb, double_int scale)
{
  unsigned i, j;

  scale = double_int_ext_for_comb (scale, comb);
  if (double_int_one_p (scale))
    return;

  if (double_int_zero_p (scale))
    {
      aff_combination_zero (comb, comb->type);
      return;
    }

  comb->offset
    = double_int_ext_for_comb (double_int_mul (scale, comb->offset), comb);
  for (i = 0, j = 0; i < comb->n; i++)
    {
      double_int new_coef;

      new_coef
        = double_int_ext_for_comb (double_int_mul (scale, comb->elts[i].coef),
                                   comb);
      /* A coefficient may become zero due to overflow.  Remove the zero
         elements.  */
      if (double_int_zero_p (new_coef))
        continue;
      comb->elts[j].coef = new_coef;
      comb->elts[j].val = comb->elts[i].val;
      j++;
    }
  comb->n = j;

  if (comb->rest)
    {
      tree type = comb->type;
      if (POINTER_TYPE_P (type))
        type = sizetype;
      if (comb->n < MAX_AFF_ELTS)
        {
          comb->elts[comb->n].coef = scale;
          comb->elts[comb->n].val = comb->rest;
          comb->rest = NULL_TREE;
          comb->n++;
        }
      else
        comb->rest = fold_build2 (MULT_EXPR, type, comb->rest,
                                  double_int_to_tree (type, scale));
    }
}
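
/* For instance (illustration only), scaling the combination
   {offset = 1, elts = {{x, 2}}} by 3 yields {offset = 3, elts = {{x, 6}}}.  */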

/* Adds ELT * SCALE to COMB.  */

void
aff_combination_add_elt (aff_tree *comb, tree elt, double_int scale)
{
  unsigned i;
  tree type;

  scale = double_int_ext_for_comb (scale, comb);
  if (double_int_zero_p (scale))
    return;

  for (i = 0; i < comb->n; i++)
    if (operand_equal_p (comb->elts[i].val, elt, 0))
      {
        double_int new_coef;

        new_coef = double_int_add (comb->elts[i].coef, scale);
        new_coef = double_int_ext_for_comb (new_coef, comb);
        if (!double_int_zero_p (new_coef))
          {
            comb->elts[i].coef = new_coef;
            return;
          }

        comb->n--;
        comb->elts[i] = comb->elts[comb->n];

        if (comb->rest)
          {
            gcc_assert (comb->n == MAX_AFF_ELTS - 1);
            comb->elts[comb->n].coef = double_int_one;
            comb->elts[comb->n].val = comb->rest;
            comb->rest = NULL_TREE;
            comb->n++;
          }
        return;
      }

  if (comb->n < MAX_AFF_ELTS)
    {
      comb->elts[comb->n].coef = scale;
      comb->elts[comb->n].val = elt;
      comb->n++;
      return;
    }

  type = comb->type;
  if (POINTER_TYPE_P (type))
    type = sizetype;

  if (double_int_one_p (scale))
    elt = fold_convert (type, elt);
  else
    elt = fold_build2 (MULT_EXPR, type,
                       fold_convert (type, elt),
                       double_int_to_tree (type, scale));

  if (comb->rest)
    comb->rest = fold_build2 (PLUS_EXPR, type, comb->rest, elt);
  else
    comb->rest = elt;
}
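
/* Note that adding an element can also cancel an existing term: adding
   x * 2 to {offset = 1, elts = {{x, -2}}} leaves just {offset = 1}.  */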

/* Adds CST to C.  */

static void
aff_combination_add_cst (aff_tree *c, double_int cst)
{
  c->offset = double_int_ext_for_comb (double_int_add (c->offset, cst), c);
}

/* Adds COMB2 to COMB1.  */

void
aff_combination_add (aff_tree *comb1, aff_tree *comb2)
{
  unsigned i;

  aff_combination_add_cst (comb1, comb2->offset);
  for (i = 0; i < comb2->n; i++)
    aff_combination_add_elt (comb1, comb2->elts[i].val, comb2->elts[i].coef);
  if (comb2->rest)
    aff_combination_add_elt (comb1, comb2->rest, double_int_one);
}

/* Converts affine combination COMB to TYPE.  */

void
aff_combination_convert (aff_tree *comb, tree type)
{
  unsigned i, j;
  tree comb_type = comb->type;

  if (TYPE_PRECISION (type) > TYPE_PRECISION (comb_type))
    {
      tree val = fold_convert (type, aff_combination_to_tree (comb));
      tree_to_aff_combination (val, type, comb);
      return;
    }

  comb->type = type;
  if (comb->rest && !POINTER_TYPE_P (type))
    comb->rest = fold_convert (type, comb->rest);

  if (TYPE_PRECISION (type) == TYPE_PRECISION (comb_type))
    return;

  comb->offset = double_int_ext_for_comb (comb->offset, comb);
  for (i = j = 0; i < comb->n; i++)
    {
      double_int new_coef = double_int_ext_for_comb (comb->elts[i].coef, comb);
      if (double_int_zero_p (new_coef))
        continue;
      comb->elts[j].coef = new_coef;
      comb->elts[j].val = fold_convert (type, comb->elts[i].val);
      j++;
    }

  comb->n = j;
  if (comb->n < MAX_AFF_ELTS && comb->rest)
    {
      comb->elts[comb->n].coef = double_int_one;
      comb->elts[comb->n].val = comb->rest;
      comb->rest = NULL_TREE;
      comb->n++;
    }
}

/* Splits EXPR into an affine combination of parts.  */

void
tree_to_aff_combination (tree expr, tree type, aff_tree *comb)
{
  aff_tree tmp;
  enum tree_code code;
  tree cst, core, toffset;
  HOST_WIDE_INT bitpos, bitsize;
  enum machine_mode mode;
  int unsignedp, volatilep;

  STRIP_NOPS (expr);

  code = TREE_CODE (expr);
  switch (code)
    {
    case INTEGER_CST:
      aff_combination_const (comb, type, tree_to_double_int (expr));
      return;

    case POINTER_PLUS_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
      aff_combination_add (comb, &tmp);
      return;

    case PLUS_EXPR:
    case MINUS_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), type, &tmp);
      if (code == MINUS_EXPR)
        aff_combination_scale (&tmp, double_int_minus_one);
      aff_combination_add (comb, &tmp);
      return;

    case MULT_EXPR:
      cst = TREE_OPERAND (expr, 1);
      if (TREE_CODE (cst) != INTEGER_CST)
        break;
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, tree_to_double_int (cst));
      return;

    case NEGATE_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, double_int_minus_one);
      return;

    case BIT_NOT_EXPR:
      /* ~x = -x - 1.  */
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, double_int_minus_one);
      aff_combination_add_cst (comb, double_int_minus_one);
      return;

    case ADDR_EXPR:
      /* Handle &MEM[ptr + CST] which is equivalent to POINTER_PLUS_EXPR.  */
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == MEM_REF)
        {
          expr = TREE_OPERAND (expr, 0);
          tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
          tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
          aff_combination_add (comb, &tmp);
          return;
        }
      core = get_inner_reference (TREE_OPERAND (expr, 0), &bitsize, &bitpos,
                                  &toffset, &mode, &unsignedp, &volatilep,
                                  false);
      if (bitpos % BITS_PER_UNIT != 0)
        break;
      aff_combination_const (comb, type,
                             uhwi_to_double_int (bitpos / BITS_PER_UNIT));
      core = build_fold_addr_expr (core);
      if (TREE_CODE (core) == ADDR_EXPR)
        aff_combination_add_elt (comb, core, double_int_one);
      else
        {
          tree_to_aff_combination (core, type, &tmp);
          aff_combination_add (comb, &tmp);
        }
      if (toffset)
        {
          tree_to_aff_combination (toffset, type, &tmp);
          aff_combination_add (comb, &tmp);
        }
      return;

    case MEM_REF:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR)
        tree_to_aff_combination (TREE_OPERAND (TREE_OPERAND (expr, 0), 0),
                                 type, comb);
      else if (integer_zerop (TREE_OPERAND (expr, 1)))
        {
          aff_combination_elt (comb, type, expr);
          return;
        }
      else
        aff_combination_elt (comb, type,
                             build2 (MEM_REF, TREE_TYPE (expr),
                                     TREE_OPERAND (expr, 0),
                                     build_int_cst
                                      (TREE_TYPE (TREE_OPERAND (expr, 1)), 0)));
      tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
      aff_combination_add (comb, &tmp);
      return;

    default:
      break;
    }

  aff_combination_elt (comb, type, expr);
}
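
/* For instance (illustration only), the expression i * 4 + 7 decomposes
   into {offset = 7, elts = {{i, 4}}} via the MULT_EXPR and PLUS_EXPR
   cases above.  */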

/* Creates EXPR + ELT * SCALE in TYPE.  EXPR is taken from affine
   combination COMB.  */

static tree
add_elt_to_tree (tree expr, tree type, tree elt, double_int scale,
                 aff_tree *comb)
{
  enum tree_code code;
  tree type1 = type;
  if (POINTER_TYPE_P (type))
    type1 = sizetype;

  scale = double_int_ext_for_comb (scale, comb);
  elt = fold_convert (type1, elt);

  if (double_int_one_p (scale))
    {
      if (!expr)
        return fold_convert (type, elt);

      if (POINTER_TYPE_P (type))
        return fold_build_pointer_plus (expr, elt);
      return fold_build2 (PLUS_EXPR, type, expr, elt);
    }

  if (double_int_minus_one_p (scale))
    {
      if (!expr)
        return fold_convert (type, fold_build1 (NEGATE_EXPR, type1, elt));

      if (POINTER_TYPE_P (type))
        {
          elt = fold_build1 (NEGATE_EXPR, type1, elt);
          return fold_build_pointer_plus (expr, elt);
        }
      return fold_build2 (MINUS_EXPR, type, expr, elt);
    }

  if (!expr)
    return fold_convert (type,
                         fold_build2 (MULT_EXPR, type1, elt,
                                      double_int_to_tree (type1, scale)));

  if (double_int_negative_p (scale))
    {
      code = MINUS_EXPR;
      scale = double_int_neg (scale);
    }
  else
    code = PLUS_EXPR;

  elt = fold_build2 (MULT_EXPR, type1, elt,
                     double_int_to_tree (type1, scale));
  if (POINTER_TYPE_P (type))
    {
      if (code == MINUS_EXPR)
        elt = fold_build1 (NEGATE_EXPR, type1, elt);
      return fold_build_pointer_plus (expr, elt);
    }
  return fold_build2 (code, type, expr, elt);
}
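
/* Negative scales are emitted as MINUS_EXPR of the negated scale, so the
   generated trees read x - CST rather than x + (-CST); see also the
   comment in aff_combination_to_tree below.  */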

/* Makes tree from the affine combination COMB.  */

tree
aff_combination_to_tree (aff_tree *comb)
{
  tree type = comb->type;
  tree expr = NULL_TREE;
  unsigned i;
  double_int off, sgn;
  tree type1 = type;
  if (POINTER_TYPE_P (type))
    type1 = sizetype;

  gcc_assert (comb->n == MAX_AFF_ELTS || comb->rest == NULL_TREE);

  for (i = 0; i < comb->n; i++)
    expr = add_elt_to_tree (expr, type, comb->elts[i].val, comb->elts[i].coef,
                            comb);

  if (comb->rest)
    expr = add_elt_to_tree (expr, type, comb->rest, double_int_one, comb);

  /* Ensure that we get x - 1, not x + (-1) or x + 0xff..f if x is
     unsigned.  */
  if (double_int_negative_p (comb->offset))
    {
      off = double_int_neg (comb->offset);
      sgn = double_int_minus_one;
    }
  else
    {
      off = comb->offset;
      sgn = double_int_one;
    }
  return add_elt_to_tree (expr, type, double_int_to_tree (type1, off), sgn,
                          comb);
}

/* Copies the tree elements of COMB to ensure that they are not shared.  */

void
unshare_aff_combination (aff_tree *comb)
{
  unsigned i;

  for (i = 0; i < comb->n; i++)
    comb->elts[i].val = unshare_expr (comb->elts[i].val);
  if (comb->rest)
    comb->rest = unshare_expr (comb->rest);
}

/* Remove M-th element from COMB.  */

void
aff_combination_remove_elt (aff_tree *comb, unsigned m)
{
  comb->n--;
  if (m <= comb->n)
    comb->elts[m] = comb->elts[comb->n];
  if (comb->rest)
    {
      comb->elts[comb->n].coef = double_int_one;
      comb->elts[comb->n].val = comb->rest;
      comb->rest = NULL_TREE;
      comb->n++;
    }
}
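
/* The removed slot is filled by moving the last element down; if a REST
   expression exists, it is pulled into the freed slot as a new element
   with coefficient one.  */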

/* Adds C * COEF * VAL to R.  VAL may be NULL, in that case only
   C * COEF is added to R.  */

static void
aff_combination_add_product (aff_tree *c, double_int coef, tree val,
                             aff_tree *r)
{
  unsigned i;
  tree aval, type;

  for (i = 0; i < c->n; i++)
    {
      aval = c->elts[i].val;
      if (val)
        {
          type = TREE_TYPE (aval);
          aval = fold_build2 (MULT_EXPR, type, aval,
                              fold_convert (type, val));
        }

      aff_combination_add_elt (r, aval,
                               double_int_mul (coef, c->elts[i].coef));
    }

  if (c->rest)
    {
      aval = c->rest;
      if (val)
        {
          type = TREE_TYPE (aval);
          aval = fold_build2 (MULT_EXPR, type, aval,
                              fold_convert (type, val));
        }

      aff_combination_add_elt (r, aval, coef);
    }

  if (val)
    aff_combination_add_elt (r, val,
                             double_int_mul (coef, c->offset));
  else
    aff_combination_add_cst (r, double_int_mul (coef, c->offset));
}

/* Multiplies C1 by C2, storing the result to R.  */

void
aff_combination_mult (aff_tree *c1, aff_tree *c2, aff_tree *r)
{
  unsigned i;
  gcc_assert (TYPE_PRECISION (c1->type) == TYPE_PRECISION (c2->type));

  aff_combination_zero (r, c1->type);

  for (i = 0; i < c2->n; i++)
    aff_combination_add_product (c1, c2->elts[i].coef, c2->elts[i].val, r);
  if (c2->rest)
    aff_combination_add_product (c1, double_int_one, c2->rest, r);
  aff_combination_add_product (c1, c2->offset, NULL, r);
}
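
/* The product is formed by distributing C1 over the terms of C2: one call
   to aff_combination_add_product per element of C2, one for C2->rest, and
   one for the constant offset of C2.  */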

/* Returns the element of COMB whose value is VAL, or NULL if no such
   element exists.  If IDX is not NULL, it is set to the index of VAL in
   COMB.  */

static struct aff_comb_elt *
aff_combination_find_elt (aff_tree *comb, tree val, unsigned *idx)
{
  unsigned i;

  for (i = 0; i < comb->n; i++)
    if (operand_equal_p (comb->elts[i].val, val, 0))
      {
        if (idx)
          *idx = i;

        return &comb->elts[i];
      }

  return NULL;
}

/* Element of the cache that maps ssa name NAME to its expanded form
   as an affine expression EXPANSION.  */

struct name_expansion
{
  aff_tree expansion;

  /* True if the expansion for the name is just being generated.  */
  unsigned in_progress : 1;
};
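
/* IN_PROGRESS lets aff_combination_expand detect that a chain of
   definitions has looped back on itself; see the assertion there.  */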

/* Expands SSA names in COMB recursively.  CACHE is used to cache the
   results.  */

void
aff_combination_expand (aff_tree *comb ATTRIBUTE_UNUSED,
                        struct pointer_map_t **cache ATTRIBUTE_UNUSED)
{
  unsigned i;
  aff_tree to_add, current, curre;
  tree e, rhs;
  gimple def;
  double_int scale;
  void **slot;
  struct name_expansion *exp;

  aff_combination_zero (&to_add, comb->type);
  for (i = 0; i < comb->n; i++)
    {
      tree type, name;
      enum tree_code code;

      e = comb->elts[i].val;
      type = TREE_TYPE (e);
      name = e;
      /* Look through some conversions.  */
      if (TREE_CODE (e) == NOP_EXPR
          && (TYPE_PRECISION (type)
              >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (e, 0)))))
        name = TREE_OPERAND (e, 0);
      if (TREE_CODE (name) != SSA_NAME)
        continue;
      def = SSA_NAME_DEF_STMT (name);
      if (!is_gimple_assign (def) || gimple_assign_lhs (def) != name)
        continue;

      code = gimple_assign_rhs_code (def);
      if (code != SSA_NAME
          && !IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
          && (get_gimple_rhs_class (code) != GIMPLE_SINGLE_RHS
              || !is_gimple_min_invariant (gimple_assign_rhs1 (def))))
        continue;

      /* We do not know whether the reference retains its value at the
         place where the expansion is used.  */
      if (TREE_CODE_CLASS (code) == tcc_reference)
        continue;

      if (!*cache)
        *cache = pointer_map_create ();
      slot = pointer_map_insert (*cache, e);
      exp = (struct name_expansion *) *slot;

      if (!exp)
        {
          exp = XNEW (struct name_expansion);
          exp->in_progress = 1;
          *slot = exp;
          /* In principle this is a generally valid folding, but
             it is not unconditionally an optimization, so do it
             here and not in fold_unary.  */
          /* Convert (T1)(X *+- CST) into (T1)X *+- (T1)CST if T1 is wider
             than the type of X and overflow for the type of X is
             undefined.  */
          if (e != name
              && INTEGRAL_TYPE_P (type)
              && INTEGRAL_TYPE_P (TREE_TYPE (name))
              && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (name))
              && (code == PLUS_EXPR || code == MINUS_EXPR || code == MULT_EXPR)
              && TREE_CODE (gimple_assign_rhs2 (def)) == INTEGER_CST)
            rhs = fold_build2 (code, type,
                               fold_convert (type, gimple_assign_rhs1 (def)),
                               fold_convert (type, gimple_assign_rhs2 (def)));
          else
            {
              rhs = gimple_assign_rhs_to_tree (def);
              if (e != name)
                rhs = fold_convert (type, rhs);
            }
          tree_to_aff_combination_expand (rhs, comb->type, &current, cache);
          exp->expansion = current;
          exp->in_progress = 0;
        }
      else
        {
          /* Since we follow the definitions in the SSA form, we should not
             enter a cycle unless we pass through a phi node.  */
          gcc_assert (!exp->in_progress);
          current = exp->expansion;
        }

      /* Accumulate the new terms to TO_ADD, so that we do not modify
         COMB while traversing it; include the term -coef * E, to remove
         it from COMB.  */
      scale = comb->elts[i].coef;
      aff_combination_zero (&curre, comb->type);
      aff_combination_add_elt (&curre, e, double_int_neg (scale));
      aff_combination_scale (&current, scale);
      aff_combination_add (&to_add, &current);
      aff_combination_add (&to_add, &curre);
    }
  aff_combination_add (comb, &to_add);
}

/* Similar to tree_to_aff_combination, but follows SSA name definitions
   and expands them recursively.  CACHE is used to cache the expansions
   of the ssa names, to avoid exponential time complexity for cases
   like

   x1 = a + b;
   x2 = x1 + x1;
   x3 = x2 + x2;
   ...  */

void
tree_to_aff_combination_expand (tree expr, tree type, aff_tree *comb,
                                struct pointer_map_t **cache)
{
  tree_to_aff_combination (expr, type, comb);
  aff_combination_expand (comb, cache);
}

/* Frees memory occupied by struct name_expansion in *VALUE.  Callback for
   pointer_map_traverse.  */

static bool
free_name_expansion (const void *key ATTRIBUTE_UNUSED, void **value,
                     void *data ATTRIBUTE_UNUSED)
{
  struct name_expansion *const exp = (struct name_expansion *) *value;

  free (exp);
  return true;
}

/* Frees memory allocated for the CACHE used by
   tree_to_aff_combination_expand.  */

void
free_affine_expand_cache (struct pointer_map_t **cache)
{
  if (!*cache)
    return;

  pointer_map_traverse (*cache, free_name_expansion, NULL);
  pointer_map_destroy (*cache);
  *cache = NULL;
}

/* If VAL != CST * DIV for any constant CST, returns false.
   Otherwise, if VAL != 0 (and hence CST != 0), and *MULT_SET is true,
   additionally compares CST and MULT, and if they are different,
   returns false.  Finally, if neither of these two cases occur,
   true is returned, and if CST != 0, CST is stored to MULT and
   MULT_SET is set to true.  */

static bool
double_int_constant_multiple_p (double_int val, double_int div,
                                bool *mult_set, double_int *mult)
{
  double_int rem, cst;

  if (double_int_zero_p (val))
    return true;

  if (double_int_zero_p (div))
    return false;

  cst = double_int_sdivmod (val, div, FLOOR_DIV_EXPR, &rem);
  if (!double_int_zero_p (rem))
    return false;

  if (*mult_set && !double_int_equal_p (*mult, cst))
    return false;

  *mult_set = true;
  *mult = cst;
  return true;
}
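
/* For instance (illustration only), VAL = 12 and DIV = 4 give CST = 3 with
   zero remainder, so *MULT becomes 3; VAL = 14 and DIV = 4 fail because
   the remainder is nonzero.  */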

/* Returns true if VAL = X * DIV for some constant X.  If this is the case,
   X is stored to MULT.  */

bool
aff_combination_constant_multiple_p (aff_tree *val, aff_tree *div,
                                     double_int *mult)
{
  bool mult_set = false;
  unsigned i;

  if (val->n == 0 && double_int_zero_p (val->offset))
    {
      *mult = double_int_zero;
      return true;
    }
  if (val->n != div->n)
    return false;

  if (val->rest || div->rest)
    return false;

  if (!double_int_constant_multiple_p (val->offset, div->offset,
                                       &mult_set, mult))
    return false;

  for (i = 0; i < div->n; i++)
    {
      struct aff_comb_elt *elt
        = aff_combination_find_elt (val, div->elts[i].val, NULL);
      if (!elt)
        return false;
      if (!double_int_constant_multiple_p (elt->coef, div->elts[i].coef,
                                           &mult_set, mult))
        return false;
    }

  gcc_assert (mult_set);
  return true;
}

/* Prints the affine VAL to the FILE.  */

void
print_aff (FILE *file, aff_tree *val)
{
  unsigned i;
  bool uns = TYPE_UNSIGNED (val->type);
  if (POINTER_TYPE_P (val->type))
    uns = false;
  fprintf (file, "{\n  type = ");
  print_generic_expr (file, val->type, TDF_VOPS|TDF_MEMSYMS);
  fprintf (file, "\n  offset = ");
  dump_double_int (file, val->offset, uns);
  if (val->n > 0)
    {
      fprintf (file, "\n  elements = {\n");
      for (i = 0; i < val->n; i++)
        {
          fprintf (file, "    [%d] = ", i);
          print_generic_expr (file, val->elts[i].val, TDF_VOPS|TDF_MEMSYMS);

          fprintf (file, " * ");
          dump_double_int (file, val->elts[i].coef, uns);
          if (i != val->n - 1)
            fprintf (file, ", \n");
        }
      fprintf (file, "\n  }");
    }
  if (val->rest)
    {
      fprintf (file, "\n  rest = ");
      print_generic_expr (file, val->rest, TDF_VOPS|TDF_MEMSYMS);
    }
  fprintf (file, "\n}");
}

/* Prints the affine VAL to the standard error, used for debugging.  */

DEBUG_FUNCTION void
debug_aff (aff_tree *val)
{
  print_aff (stderr, val);
  fprintf (stderr, "\n");
}

/* Returns address of the reference REF in ADDR.  The size of the accessed
   location is stored to SIZE.  */

void
get_inner_reference_aff (tree ref, aff_tree *addr, double_int *size)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree toff;
  enum machine_mode mode;
  int uns, vol;
  aff_tree tmp;
  tree base = get_inner_reference (ref, &bitsize, &bitpos, &toff, &mode,
                                   &uns, &vol, false);
  tree base_addr = build_fold_addr_expr (base);

  /* ADDR = &BASE + TOFF + BITPOS / BITS_PER_UNIT.  */

  tree_to_aff_combination (base_addr, sizetype, addr);

  if (toff)
    {
      tree_to_aff_combination (toff, sizetype, &tmp);
      aff_combination_add (addr, &tmp);
    }

  aff_combination_const (&tmp, sizetype,
                         shwi_to_double_int (bitpos / BITS_PER_UNIT));
  aff_combination_add (addr, &tmp);

  *size = shwi_to_double_int ((bitsize + BITS_PER_UNIT - 1) / BITS_PER_UNIT);
}

/* Returns true if a region of size SIZE1 at position 0 and a region of
   size SIZE2 at position DIFF cannot overlap.  */

bool
aff_comb_cannot_overlap_p (aff_tree *diff, double_int size1, double_int size2)
{
  double_int d, bound;

  /* Unless the difference is a constant, we fail.  */
  if (diff->n != 0)
    return false;

  d = diff->offset;
  if (double_int_negative_p (d))
    {
      /* The second object is before the first one, we succeed if the last
         element of the second object is before the start of the first one.  */
      bound = double_int_add (d, double_int_add (size2, double_int_minus_one));
      return double_int_negative_p (bound);
    }
  else
    {
      /* We succeed if the second object starts after the first one ends.  */
      return double_int_scmp (size1, d) <= 0;
    }
}
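
/* For instance (illustration only), with SIZE1 = SIZE2 = 4: DIFF = -4 gives
   bound = -1 < 0, so the regions cannot overlap; DIFF = 4 satisfies
   SIZE1 <= DIFF, so they cannot overlap either; DIFF = 2 may overlap.  */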