/* Operations with affine combinations of trees.
   Copyright (C) 2005-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "tree-pretty-print.h"
#include "pointer-set.h"
#include "tree-affine.h"
#include "gimple.h"
#include "flags.h"
#include "dumpfile.h"
#include "wide-int-print.h"

/* Extends CST as appropriate for the affine combinations COMB.  */

static widest_int
wide_int_ext_for_comb (const widest_int &cst, aff_tree *comb)
{
  return wi::sext (cst, TYPE_PRECISION (comb->type));
}
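
/* Illustrative note (not part of the original sources): with a 32-bit
   COMB->type, the value 0xffffffff sign-extends to -1 above, so offsets
   and coefficients always behave as signed numbers of the combination's
   precision.  */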

/* Initializes affine combination COMB so that its value is zero in TYPE.  */

static void
aff_combination_zero (aff_tree *comb, tree type)
{
  int i;

  comb->type = type;
  comb->offset = 0;
  comb->n = 0;
  for (i = 0; i < MAX_AFF_ELTS; i++)
    comb->elts[i].coef = 0;
  comb->rest = NULL_TREE;
}

/* Sets COMB to CST.  */

void
aff_combination_const (aff_tree *comb, tree type, const widest_int &cst)
{
  aff_combination_zero (comb, type);
  comb->offset = wide_int_ext_for_comb (cst, comb);
}

/* Sets COMB to single element ELT.  */

void
aff_combination_elt (aff_tree *comb, tree type, tree elt)
{
  aff_combination_zero (comb, type);

  comb->n = 1;
  comb->elts[0].val = elt;
  comb->elts[0].coef = 1;
}

/* Scales COMB by SCALE.  */

void
aff_combination_scale (aff_tree *comb, const widest_int &scale_in)
{
  unsigned i, j;

  widest_int scale = wide_int_ext_for_comb (scale_in, comb);
  if (scale == 1)
    return;

  if (scale == 0)
    {
      aff_combination_zero (comb, comb->type);
      return;
    }

  comb->offset = wide_int_ext_for_comb (scale * comb->offset, comb);
  for (i = 0, j = 0; i < comb->n; i++)
    {
      widest_int new_coef
	= wide_int_ext_for_comb (scale * comb->elts[i].coef, comb);
      /* A coefficient may become zero due to overflow.  Remove the zero
	 elements.  */
      if (new_coef == 0)
	continue;
      comb->elts[j].coef = new_coef;
      comb->elts[j].val = comb->elts[i].val;
      j++;
    }
  comb->n = j;

  if (comb->rest)
    {
      tree type = comb->type;
      if (POINTER_TYPE_P (type))
	type = sizetype;
      if (comb->n < MAX_AFF_ELTS)
	{
	  comb->elts[comb->n].coef = scale;
	  comb->elts[comb->n].val = comb->rest;
	  comb->rest = NULL_TREE;
	  comb->n++;
	}
      else
	comb->rest = fold_build2 (MULT_EXPR, type, comb->rest,
				  wide_int_to_tree (type, scale));
    }
}

/* Adds ELT * SCALE to COMB.  */

void
aff_combination_add_elt (aff_tree *comb, tree elt, const widest_int &scale_in)
{
  unsigned i;
  tree type;

  widest_int scale = wide_int_ext_for_comb (scale_in, comb);
  if (scale == 0)
    return;

  for (i = 0; i < comb->n; i++)
    if (operand_equal_p (comb->elts[i].val, elt, 0))
      {
	widest_int new_coef
	  = wide_int_ext_for_comb (comb->elts[i].coef + scale, comb);
	if (new_coef != 0)
	  {
	    comb->elts[i].coef = new_coef;
	    return;
	  }

	comb->n--;
	comb->elts[i] = comb->elts[comb->n];

	if (comb->rest)
	  {
	    gcc_assert (comb->n == MAX_AFF_ELTS - 1);
	    comb->elts[comb->n].coef = 1;
	    comb->elts[comb->n].val = comb->rest;
	    comb->rest = NULL_TREE;
	    comb->n++;
	  }
	return;
      }
  if (comb->n < MAX_AFF_ELTS)
    {
      comb->elts[comb->n].coef = scale;
      comb->elts[comb->n].val = elt;
      comb->n++;
      return;
    }

  type = comb->type;
  if (POINTER_TYPE_P (type))
    type = sizetype;

  if (scale == 1)
    elt = fold_convert (type, elt);
  else
    elt = fold_build2 (MULT_EXPR, type,
		       fold_convert (type, elt),
		       wide_int_to_tree (type, scale));

  if (comb->rest)
    comb->rest = fold_build2 (PLUS_EXPR, type, comb->rest,
			      elt);
  else
    comb->rest = elt;
}

/* Adds CST to C.  */

static void
aff_combination_add_cst (aff_tree *c, const widest_int &cst)
{
  c->offset = wide_int_ext_for_comb (c->offset + cst, c);
}

/* Adds COMB2 to COMB1.  */

void
aff_combination_add (aff_tree *comb1, aff_tree *comb2)
{
  unsigned i;

  aff_combination_add_cst (comb1, comb2->offset);
  for (i = 0; i < comb2->n; i++)
    aff_combination_add_elt (comb1, comb2->elts[i].val, comb2->elts[i].coef);
  if (comb2->rest)
    aff_combination_add_elt (comb1, comb2->rest, 1);
}
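
/* Illustrative sketch (not part of the original sources): building the
   affine form of BASE + 4 * IDX + 16 with the routines above.  BASE and
   IDX are assumed to be trees of sizetype; the function name is made up
   purely for illustration.  */
#if 0
static void
example_build_affine_form (tree base, tree idx)
{
  aff_tree comb, tmp;

  aff_combination_elt (&comb, sizetype, base);	/* BASE */
  aff_combination_elt (&tmp, sizetype, idx);	/* IDX */
  aff_combination_scale (&tmp, 4);		/* 4 * IDX */
  aff_combination_add (&comb, &tmp);		/* BASE + 4 * IDX */
  aff_combination_add_cst (&comb, 16);		/* BASE + 4 * IDX + 16 */
  /* COMB now has offset 16 and the elements {BASE, 1} and {IDX, 4}.  */
}
#endif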

/* Converts affine combination COMB to TYPE.  */

void
aff_combination_convert (aff_tree *comb, tree type)
{
  unsigned i, j;
  tree comb_type = comb->type;

  if (TYPE_PRECISION (type) > TYPE_PRECISION (comb_type))
    {
      tree val = fold_convert (type, aff_combination_to_tree (comb));
      tree_to_aff_combination (val, type, comb);
      return;
    }

  comb->type = type;
  if (comb->rest && !POINTER_TYPE_P (type))
    comb->rest = fold_convert (type, comb->rest);

  if (TYPE_PRECISION (type) == TYPE_PRECISION (comb_type))
    return;

  comb->offset = wide_int_ext_for_comb (comb->offset, comb);
  for (i = j = 0; i < comb->n; i++)
    {
      if (comb->elts[i].coef == 0)
	continue;
      comb->elts[j].coef = comb->elts[i].coef;
      comb->elts[j].val = fold_convert (type, comb->elts[i].val);
      j++;
    }

  comb->n = j;
  if (comb->n < MAX_AFF_ELTS && comb->rest)
    {
      comb->elts[comb->n].coef = 1;
      comb->elts[comb->n].val = comb->rest;
      comb->rest = NULL_TREE;
      comb->n++;
    }
}

/* Splits EXPR into an affine combination of parts.  */

void
tree_to_aff_combination (tree expr, tree type, aff_tree *comb)
{
  aff_tree tmp;
  enum tree_code code;
  tree cst, core, toffset;
  HOST_WIDE_INT bitpos, bitsize;
  enum machine_mode mode;
  int unsignedp, volatilep;

  STRIP_NOPS (expr);

  code = TREE_CODE (expr);
  switch (code)
    {
    case INTEGER_CST:
      aff_combination_const (comb, type, wi::to_widest (expr));
      return;

    case POINTER_PLUS_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
      aff_combination_add (comb, &tmp);
      return;

    case PLUS_EXPR:
    case MINUS_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), type, &tmp);
      if (code == MINUS_EXPR)
	aff_combination_scale (&tmp, -1);
      aff_combination_add (comb, &tmp);
      return;

    case MULT_EXPR:
      cst = TREE_OPERAND (expr, 1);
      if (TREE_CODE (cst) != INTEGER_CST)
	break;
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, wi::to_widest (cst));
      return;

    case NEGATE_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, -1);
      return;

    case BIT_NOT_EXPR:
      /* ~x = -x - 1 */
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, -1);
      aff_combination_add_cst (comb, -1);
      return;

    case ADDR_EXPR:
      /* Handle &MEM[ptr + CST] which is equivalent to POINTER_PLUS_EXPR.  */
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == MEM_REF)
	{
	  expr = TREE_OPERAND (expr, 0);
	  tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
	  tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
	  aff_combination_add (comb, &tmp);
	  return;
	}
      core = get_inner_reference (TREE_OPERAND (expr, 0), &bitsize, &bitpos,
				  &toffset, &mode, &unsignedp, &volatilep,
				  false);
      if (bitpos % BITS_PER_UNIT != 0)
	break;
      aff_combination_const (comb, type, bitpos / BITS_PER_UNIT);
      core = build_fold_addr_expr (core);
      if (TREE_CODE (core) == ADDR_EXPR)
	aff_combination_add_elt (comb, core, 1);
      else
	{
	  tree_to_aff_combination (core, type, &tmp);
	  aff_combination_add (comb, &tmp);
	}
      if (toffset)
	{
	  tree_to_aff_combination (toffset, type, &tmp);
	  aff_combination_add (comb, &tmp);
	}
      return;

    case MEM_REF:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR)
	tree_to_aff_combination (TREE_OPERAND (TREE_OPERAND (expr, 0), 0),
				 type, comb);
      else if (integer_zerop (TREE_OPERAND (expr, 1)))
	{
	  aff_combination_elt (comb, type, expr);
	  return;
	}
      else
	aff_combination_elt (comb, type,
			     build2 (MEM_REF, TREE_TYPE (expr),
				     TREE_OPERAND (expr, 0),
				     build_int_cst
				      (TREE_TYPE (TREE_OPERAND (expr, 1)), 0)));
      tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
      aff_combination_add (comb, &tmp);
      return;

    default:
      break;
    }

  aff_combination_elt (comb, type, expr);
}
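
/* Illustrative note (not part of the original sources): for an expression
   such as p + 4 * i + 7, the decomposition above yields offset 7 and the
   elements {p, 1} and {i, 4}; subexpressions that cannot be decomposed
   further become single elements with coefficient 1, and once MAX_AFF_ELTS
   elements are in use the remainder is accumulated in COMB->rest.  */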

/* Creates EXPR + ELT * SCALE in TYPE.  EXPR is taken from affine
   combination COMB.  */

static tree
add_elt_to_tree (tree expr, tree type, tree elt, const widest_int &scale_in,
		 aff_tree *comb ATTRIBUTE_UNUSED)
{
  enum tree_code code;
  tree type1 = type;
  if (POINTER_TYPE_P (type))
    type1 = sizetype;

  widest_int scale = wide_int_ext_for_comb (scale_in, comb);

  if (scale == -1
      && POINTER_TYPE_P (TREE_TYPE (elt)))
    {
      elt = convert_to_ptrofftype (elt);
      elt = fold_build1 (NEGATE_EXPR, TREE_TYPE (elt), elt);
      scale = 1;
    }

  if (scale == 1)
    {
      if (!expr)
	{
	  if (POINTER_TYPE_P (TREE_TYPE (elt)))
	    return elt;
	  else
	    return fold_convert (type1, elt);
	}

      if (POINTER_TYPE_P (TREE_TYPE (expr)))
	return fold_build_pointer_plus (expr, elt);
      if (POINTER_TYPE_P (TREE_TYPE (elt)))
	return fold_build_pointer_plus (elt, expr);
      return fold_build2 (PLUS_EXPR, type1,
			  expr, fold_convert (type1, elt));
    }

  if (scale == -1)
    {
      if (!expr)
	return fold_build1 (NEGATE_EXPR, type1,
			    fold_convert (type1, elt));

      if (POINTER_TYPE_P (TREE_TYPE (expr)))
	{
	  elt = convert_to_ptrofftype (elt);
	  elt = fold_build1 (NEGATE_EXPR, TREE_TYPE (elt), elt);
	  return fold_build_pointer_plus (expr, elt);
	}
      return fold_build2 (MINUS_EXPR, type1,
			  expr, fold_convert (type1, elt));
    }

  elt = fold_convert (type1, elt);
  if (!expr)
    return fold_build2 (MULT_EXPR, type1, elt,
			wide_int_to_tree (type1, scale));

  if (wi::neg_p (scale))
    {
      code = MINUS_EXPR;
      scale = -scale;
    }
  else
    code = PLUS_EXPR;

  elt = fold_build2 (MULT_EXPR, type1, elt,
		     wide_int_to_tree (type1, scale));
  if (POINTER_TYPE_P (TREE_TYPE (expr)))
    {
      if (code == MINUS_EXPR)
	elt = fold_build1 (NEGATE_EXPR, type1, elt);
      return fold_build_pointer_plus (expr, elt);
    }
  return fold_build2 (code, type1, expr, elt);
}

/* Makes tree from the affine combination COMB.  */

tree
aff_combination_to_tree (aff_tree *comb)
{
  tree type = comb->type;
  tree expr = NULL_TREE;
  unsigned i;
  widest_int off, sgn;
  tree type1 = type;
  if (POINTER_TYPE_P (type))
    type1 = sizetype;

  gcc_assert (comb->n == MAX_AFF_ELTS || comb->rest == NULL_TREE);

  for (i = 0; i < comb->n; i++)
    expr = add_elt_to_tree (expr, type, comb->elts[i].val, comb->elts[i].coef,
			    comb);

  if (comb->rest)
    expr = add_elt_to_tree (expr, type, comb->rest, 1, comb);

  /* Ensure that we get x - 1, not x + (-1) or x + 0xff..f if x is
     unsigned.  */
  if (wi::neg_p (comb->offset))
    {
      off = -comb->offset;
      sgn = -1;
    }
  else
    {
      off = comb->offset;
      sgn = 1;
    }
  return add_elt_to_tree (expr, type, wide_int_to_tree (type1, off), sgn,
			  comb);
}
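
/* Illustrative sketch (not part of the original sources): a round trip
   through the affine representation.  EXPR is assumed to be an expression
   of integral type TYPE; the helper name is made up purely for
   illustration.  The round trip canonicalizes the expression, e.g.
   (i + 1) - i collapses to the constant 1.  */
#if 0
static tree
example_affine_round_trip (tree expr, tree type)
{
  aff_tree comb;

  tree_to_aff_combination (expr, type, &comb);	/* decompose */
  return aff_combination_to_tree (&comb);	/* rebuild */
}
#endif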

/* Copies the tree elements of COMB to ensure that they are not shared.  */

void
unshare_aff_combination (aff_tree *comb)
{
  unsigned i;

  for (i = 0; i < comb->n; i++)
    comb->elts[i].val = unshare_expr (comb->elts[i].val);
  if (comb->rest)
    comb->rest = unshare_expr (comb->rest);
}

/* Remove M-th element from COMB.  */

void
aff_combination_remove_elt (aff_tree *comb, unsigned m)
{
  comb->n--;
  if (m <= comb->n)
    comb->elts[m] = comb->elts[comb->n];
  if (comb->rest)
    {
      comb->elts[comb->n].coef = 1;
      comb->elts[comb->n].val = comb->rest;
      comb->rest = NULL_TREE;
      comb->n++;
    }
}

/* Adds C * COEF * VAL to R.  VAL may be NULL, in that case only
   C * COEF is added to R.  */

static void
aff_combination_add_product (aff_tree *c, const widest_int &coef, tree val,
			     aff_tree *r)
{
  unsigned i;
  tree aval, type;

  for (i = 0; i < c->n; i++)
    {
      aval = c->elts[i].val;
      if (val)
	{
	  type = TREE_TYPE (aval);
	  aval = fold_build2 (MULT_EXPR, type, aval,
			      fold_convert (type, val));
	}

      aff_combination_add_elt (r, aval, coef * c->elts[i].coef);
    }

  if (c->rest)
    {
      aval = c->rest;
      if (val)
	{
	  type = TREE_TYPE (aval);
	  aval = fold_build2 (MULT_EXPR, type, aval,
			      fold_convert (type, val));
	}

      aff_combination_add_elt (r, aval, coef);
    }

  if (val)
    aff_combination_add_elt (r, val, coef * c->offset);
  else
    aff_combination_add_cst (r, coef * c->offset);
}

/* Multiplies C1 by C2, storing the result to R.  */

void
aff_combination_mult (aff_tree *c1, aff_tree *c2, aff_tree *r)
{
  unsigned i;
  gcc_assert (TYPE_PRECISION (c1->type) == TYPE_PRECISION (c2->type));

  aff_combination_zero (r, c1->type);

  for (i = 0; i < c2->n; i++)
    aff_combination_add_product (c1, c2->elts[i].coef, c2->elts[i].val, r);
  if (c2->rest)
    aff_combination_add_product (c1, 1, c2->rest, r);
  aff_combination_add_product (c1, c2->offset, NULL, r);
}
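
/* Illustrative note (not part of the original sources): multiplying the
   combinations of (a + 2) and (b + 3) gives offset 6 and the elements
   {a * b, 1}, {a, 3} and {b, 2}, i.e. a*b + 3*a + 2*b + 6; the product of
   two non-constant elements becomes a new element whose value is the
   folded MULT_EXPR tree.  */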

/* Returns the element of COMB whose value is VAL, or NULL if no such
   element exists.  If IDX is not NULL, it is set to the index of VAL in
   COMB.  */

static struct aff_comb_elt *
aff_combination_find_elt (aff_tree *comb, tree val, unsigned *idx)
{
  unsigned i;

  for (i = 0; i < comb->n; i++)
    if (operand_equal_p (comb->elts[i].val, val, 0))
      {
	if (idx)
	  *idx = i;

	return &comb->elts[i];
      }

  return NULL;
}

/* Element of the cache that maps ssa name NAME to its expanded form
   as an affine expression EXPANSION.  */

struct name_expansion
{
  aff_tree expansion;

  /* True if the expansion for the name is just being generated.  */
  unsigned in_progress : 1;
};

/* Expands SSA names in COMB recursively.  CACHE is used to cache the
   results.  */

void
aff_combination_expand (aff_tree *comb ATTRIBUTE_UNUSED,
			struct pointer_map_t **cache ATTRIBUTE_UNUSED)
{
  unsigned i;
  aff_tree to_add, current, curre;
  tree e, rhs;
  gimple def;
  widest_int scale;
  void **slot;
  struct name_expansion *exp;

  aff_combination_zero (&to_add, comb->type);
  for (i = 0; i < comb->n; i++)
    {
      tree type, name;
      enum tree_code code;

      e = comb->elts[i].val;
      type = TREE_TYPE (e);
      name = e;
      /* Look through some conversions.  */
      if (TREE_CODE (e) == NOP_EXPR
	  && (TYPE_PRECISION (type)
	      >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (e, 0)))))
	name = TREE_OPERAND (e, 0);
      if (TREE_CODE (name) != SSA_NAME)
	continue;
      def = SSA_NAME_DEF_STMT (name);
      if (!is_gimple_assign (def) || gimple_assign_lhs (def) != name)
	continue;

      code = gimple_assign_rhs_code (def);
      if (code != SSA_NAME
	  && !IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
	  && (get_gimple_rhs_class (code) != GIMPLE_SINGLE_RHS
	      || !is_gimple_min_invariant (gimple_assign_rhs1 (def))))
	continue;

      /* We do not know whether the reference retains its value at the
	 place where the expansion is used.  */
      if (TREE_CODE_CLASS (code) == tcc_reference)
	continue;

      if (!*cache)
	*cache = pointer_map_create ();
      slot = pointer_map_insert (*cache, e);
      exp = (struct name_expansion *) *slot;

      if (!exp)
	{
	  exp = XNEW (struct name_expansion);
	  exp->in_progress = 1;
	  *slot = exp;
	  /* In principle this is a generally valid folding, but
	     it is not unconditionally an optimization, so do it
	     here and not in fold_unary.  */
	  /* Convert (T1)(X *+- CST) into (T1)X *+- (T1)CST if T1 is wider
	     than the type of X and overflow for the type of X is
	     undefined.  */
	  if (e != name
	      && INTEGRAL_TYPE_P (type)
	      && INTEGRAL_TYPE_P (TREE_TYPE (name))
	      && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (name))
	      && (code == PLUS_EXPR || code == MINUS_EXPR || code == MULT_EXPR)
	      && TREE_CODE (gimple_assign_rhs2 (def)) == INTEGER_CST)
	    rhs = fold_build2 (code, type,
			       fold_convert (type, gimple_assign_rhs1 (def)),
			       fold_convert (type, gimple_assign_rhs2 (def)));
	  else
	    {
	      rhs = gimple_assign_rhs_to_tree (def);
	      if (e != name)
		rhs = fold_convert (type, rhs);
	    }
	  tree_to_aff_combination_expand (rhs, comb->type, &current, cache);
	  exp->expansion = current;
	  exp->in_progress = 0;
	}
      else
	{
	  /* Since we follow the definitions in the SSA form, we should not
	     enter a cycle unless we pass through a phi node.  */
	  gcc_assert (!exp->in_progress);
	  current = exp->expansion;
	}

      /* Accumulate the new terms to TO_ADD, so that we do not modify
	 COMB while traversing it; include the term -coef * E, to remove
	 it from COMB.  */
      scale = comb->elts[i].coef;
      aff_combination_zero (&curre, comb->type);
      aff_combination_add_elt (&curre, e, -scale);
      aff_combination_scale (&current, scale);
      aff_combination_add (&to_add, &current);
      aff_combination_add (&to_add, &curre);
    }
  aff_combination_add (comb, &to_add);
}

/* Similar to tree_to_aff_combination, but follows SSA name definitions
   and expands them recursively.  CACHE is used to cache the expansions
   of the ssa names, to avoid exponential time complexity for cases
   like a long chain of assignments D.1 = A + 1; D.2 = D.1 + 1; ...;
   the same cache may be passed to several calls so that common
   definitions are expanded only once.  */

void
tree_to_aff_combination_expand (tree expr, tree type, aff_tree *comb,
				struct pointer_map_t **cache)
{
  tree_to_aff_combination (expr, type, comb);
  aff_combination_expand (comb, cache);
}

/* Frees memory occupied by struct name_expansion in *VALUE.  Callback for
   pointer_map_traverse.  */

static bool
free_name_expansion (const void *key ATTRIBUTE_UNUSED, void **value,
		     void *data ATTRIBUTE_UNUSED)
{
  struct name_expansion *const exp = (struct name_expansion *) *value;

  free (exp);
  return true;
}

/* Frees memory allocated for the CACHE used by
   tree_to_aff_combination_expand.  */

void
free_affine_expand_cache (struct pointer_map_t **cache)
{
  if (!*cache)
    return;

  pointer_map_traverse (*cache, free_name_expansion, NULL);
  pointer_map_destroy (*cache);
  *cache = NULL;
}
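
/* Illustrative sketch (not part of the original sources): expanding two
   expressions with a shared cache, so that common SSA-name definitions are
   expanded only once, and then releasing the cache.  EXPR1, EXPR2 and TYPE
   are assumed inputs; the helper name is made up purely for illustration.  */
#if 0
static void
example_expand_with_cache (tree expr1, tree expr2, tree type)
{
  aff_tree comb1, comb2;
  struct pointer_map_t *cache = NULL;

  tree_to_aff_combination_expand (expr1, type, &comb1, &cache);
  tree_to_aff_combination_expand (expr2, type, &comb2, &cache);
  /* ... compare or combine COMB1 and COMB2 here ...  */
  free_affine_expand_cache (&cache);
}
#endif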

/* If VAL != CST * DIV for any constant CST, returns false.
   Otherwise, if *MULT_SET is true, additionally compares CST and MULT,
   and if they are different, returns false.  Finally, if neither of these
   two cases occur, true is returned, and CST is stored to MULT and MULT_SET
   is set to true.  */

static bool
wide_int_constant_multiple_p (const widest_int &val, const widest_int &div,
			      bool *mult_set, widest_int *mult)
{
  widest_int cst;

  if (val == 0)
    {
      if (*mult_set && *mult != 0)
	return false;
      *mult_set = true;
      *mult = 0;
      return true;
    }

  if (div == 0)
    return false;

  if (!wi::multiple_of_p (val, div, SIGNED, &cst))
    return false;

  if (*mult_set && *mult != cst)
    return false;

  *mult_set = true;
  *mult = cst;
  return true;
}

/* Returns true if VAL = X * DIV for some constant X.  If this is the case,
   X is stored to MULT.  */

bool
aff_combination_constant_multiple_p (aff_tree *val, aff_tree *div,
				     widest_int *mult)
{
  bool mult_set = false;
  unsigned i;

  if (val->n == 0 && val->offset == 0)
    {
      *mult = 0;
      return true;
    }
  if (val->n != div->n)
    return false;

  if (val->rest || div->rest)
    return false;

  if (!wide_int_constant_multiple_p (val->offset, div->offset,
				     &mult_set, mult))
    return false;

  for (i = 0; i < div->n; i++)
    {
      struct aff_comb_elt *elt
	= aff_combination_find_elt (val, div->elts[i].val, NULL);
      if (!elt)
	return false;
      if (!wide_int_constant_multiple_p (elt->coef, div->elts[i].coef,
					 &mult_set, mult))
	return false;
    }

  gcc_assert (mult_set);
  return true;
}
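
/* Illustrative sketch (not part of the original sources): testing whether
   one affine form is a constant multiple of another, e.g. 8*i + 16 versus
   2*i + 4, for which MULT becomes 4.  VAL and DIV are assumed to be
   already-built combinations; the helper name is made up purely for
   illustration.  */
#if 0
static bool
example_constant_multiple (aff_tree *val, aff_tree *div)
{
  widest_int mult;

  if (!aff_combination_constant_multiple_p (val, div, &mult))
    return false;
  /* Here VAL == MULT * DIV; MULT is 4 for the forms mentioned above.  */
  return true;
}
#endif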

/* Prints the affine VAL to the FILE.  */

void
print_aff (FILE *file, aff_tree *val)
{
  unsigned i;
  signop sgn = TYPE_SIGN (val->type);
  if (POINTER_TYPE_P (val->type))
    sgn = SIGNED;
  fprintf (file, "{\n  type = ");
  print_generic_expr (file, val->type, TDF_VOPS|TDF_MEMSYMS);
  fprintf (file, "\n  offset = ");
  print_dec (val->offset, file, sgn);
  if (val->n > 0)
    {
      fprintf (file, "\n  elements = {\n");
      for (i = 0; i < val->n; i++)
	{
	  fprintf (file, "    [%d] = ", i);
	  print_generic_expr (file, val->elts[i].val, TDF_VOPS|TDF_MEMSYMS);

	  fprintf (file, " * ");
	  print_dec (val->elts[i].coef, file, sgn);
	  if (i != val->n - 1)
	    fprintf (file, ", \n");
	}
      fprintf (file, "\n  }");
    }
  if (val->rest)
    {
      fprintf (file, "\n  rest = ");
      print_generic_expr (file, val->rest, TDF_VOPS|TDF_MEMSYMS);
    }
  fprintf (file, "\n}");
}

/* Prints the affine VAL to the standard error, used for debugging.  */

DEBUG_FUNCTION void
debug_aff (aff_tree *val)
{
  print_aff (stderr, val);
  fprintf (stderr, "\n");
}

/* Returns address of the reference REF in ADDR.  The size of the accessed
   location is stored to SIZE.  */

void
get_inner_reference_aff (tree ref, aff_tree *addr, widest_int *size)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree toff;
  enum machine_mode mode;
  int uns, vol;
  aff_tree tmp;
  tree base = get_inner_reference (ref, &bitsize, &bitpos, &toff, &mode,
				   &uns, &vol, false);
  tree base_addr = build_fold_addr_expr (base);

  /* ADDR = &BASE + TOFF + BITPOS / BITS_PER_UNIT.  */

  tree_to_aff_combination (base_addr, sizetype, addr);

  if (toff)
    {
      tree_to_aff_combination (toff, sizetype, &tmp);
      aff_combination_add (addr, &tmp);
    }

  aff_combination_const (&tmp, sizetype, bitpos / BITS_PER_UNIT);
  aff_combination_add (addr, &tmp);

  *size = (bitsize + BITS_PER_UNIT - 1) / BITS_PER_UNIT;
}

/* Returns true if a region of size SIZE1 at position 0 and a region of
   size SIZE2 at position DIFF cannot overlap.  */

bool
aff_comb_cannot_overlap_p (aff_tree *diff, const widest_int &size1,
			   const widest_int &size2)
{
  /* Unless the difference is a constant, we fail.  */
  if (diff->n != 0)
    return false;

  if (wi::neg_p (diff->offset))
    {
      /* The second object is before the first one, we succeed if the last
	 element of the second object is before the start of the first one.  */
      return wi::neg_p (diff->offset + size2 - 1);
    }
  else
    {
      /* We succeed if the second object starts after the first one ends.  */
      return wi::les_p (size1, diff->offset);
    }
}
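
/* Illustrative sketch (not part of the original sources): proving that two
   memory references cannot overlap.  REF1 and REF2 are assumed to be
   reference trees; the helper name is made up purely for illustration.
   DIFF is the affine difference of the two addresses, which is what the
   function above expects.  */
#if 0
static bool
example_refs_cannot_overlap_p (tree ref1, tree ref2)
{
  aff_tree addr1, addr2;
  widest_int size1, size2;

  get_inner_reference_aff (ref1, &addr1, &size1);
  get_inner_reference_aff (ref2, &addr2, &size2);

  /* Compute DIFF = ADDR2 - ADDR1; REF1 then sits at position 0.  */
  aff_combination_scale (&addr1, -1);
  aff_combination_add (&addr2, &addr1);

  return aff_comb_cannot_overlap_p (&addr2, size1, size2);
}
#endif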