/* Operations with affine combinations of trees.
   Copyright (C) 2005-2017 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "ssa.h"
#include "tree-pretty-print.h"
#include "fold-const.h"
#include "tree-affine.h"
#include "gimplify.h"
#include "dumpfile.h"
#include "cfgexpand.h"

/* Extends CST as appropriate for the affine combination with type TYPE.  */

static widest_int
wide_int_ext_for_comb (const widest_int &cst, tree type)
{
  return wi::sext (cst, TYPE_PRECISION (type));
}
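
/* For example, with a 32-bit TYPE the constant 0xffffffff is
   sign-extended to -1, so after this canonicalization the coefficients
   x * 0xffffffff and x * -1 are represented identically and compare
   equal inside a combination.  */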

/* Initializes affine combination COMB so that its value is zero in TYPE.  */

static void
aff_combination_zero (aff_tree *comb, tree type)
{
  int i;

  comb->type = type;
  comb->offset = 0;
  comb->n = 0;
  for (i = 0; i < MAX_AFF_ELTS; i++)
    comb->elts[i].coef = 0;
  comb->rest = NULL_TREE;
}

/* Sets COMB to CST.  */

void
aff_combination_const (aff_tree *comb, tree type, const widest_int &cst)
{
  aff_combination_zero (comb, type);
  comb->offset = wide_int_ext_for_comb (cst, comb->type);
}

/* Sets COMB to single element ELT.  */

void
aff_combination_elt (aff_tree *comb, tree type, tree elt)
{
  aff_combination_zero (comb, type);

  comb->n = 1;
  comb->elts[0].val = elt;
  comb->elts[0].coef = 1;
}

/* Scales COMB by SCALE.  */

void
aff_combination_scale (aff_tree *comb, const widest_int &scale_in)
{
  unsigned i, j;

  widest_int scale = wide_int_ext_for_comb (scale_in, comb->type);
  if (scale == 1)
    return;

  if (scale == 0)
    {
      aff_combination_zero (comb, comb->type);
      return;
    }

  comb->offset = wide_int_ext_for_comb (scale * comb->offset, comb->type);
  for (i = 0, j = 0; i < comb->n; i++)
    {
      widest_int new_coef
        = wide_int_ext_for_comb (scale * comb->elts[i].coef, comb->type);
      /* A coefficient may become zero due to overflow.  Remove the zero
         elements.  */
      if (new_coef == 0)
        continue;
      comb->elts[j].coef = new_coef;
      comb->elts[j].val = comb->elts[i].val;
      j++;
    }
  comb->n = j;

  if (comb->rest)
    {
      tree type = comb->type;
      if (POINTER_TYPE_P (type))
        type = sizetype;
      if (comb->n < MAX_AFF_ELTS)
        {
          comb->elts[comb->n].coef = scale;
          comb->elts[comb->n].val = comb->rest;
          comb->rest = NULL_TREE;
          comb->n++;
        }
      else
        comb->rest = fold_build2 (MULT_EXPR, type, comb->rest,
                                  wide_int_to_tree (type, scale));
    }
}
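
/* As an illustration, scaling the combination 2*x + 1 by 3 yields
   6*x + 3: the offset and every coefficient are multiplied and then
   re-extended for the combination's type, and any coefficient that
   wraps around to zero drops its element entirely.  */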

/* Adds ELT * SCALE to COMB.  */

void
aff_combination_add_elt (aff_tree *comb, tree elt, const widest_int &scale_in)
{
  unsigned i;
  tree type;

  widest_int scale = wide_int_ext_for_comb (scale_in, comb->type);
  if (scale == 0)
    return;

  for (i = 0; i < comb->n; i++)
    if (operand_equal_p (comb->elts[i].val, elt, 0))
      {
        widest_int new_coef
          = wide_int_ext_for_comb (comb->elts[i].coef + scale, comb->type);
        if (new_coef != 0)
          {
            comb->elts[i].coef = new_coef;
            return;
          }

        comb->n--;
        comb->elts[i] = comb->elts[comb->n];

        if (comb->rest)
          {
            gcc_assert (comb->n == MAX_AFF_ELTS - 1);
            comb->elts[comb->n].coef = 1;
            comb->elts[comb->n].val = comb->rest;
            comb->rest = NULL_TREE;
            comb->n++;
          }
        return;
      }
  if (comb->n < MAX_AFF_ELTS)
    {
      comb->elts[comb->n].coef = scale;
      comb->elts[comb->n].val = elt;
      comb->n++;
      return;
    }

  type = comb->type;
  if (POINTER_TYPE_P (type))
    type = sizetype;

  if (scale == 1)
    elt = fold_convert (type, elt);
  else
    elt = fold_build2 (MULT_EXPR, type,
                       fold_convert (type, elt),
                       wide_int_to_tree (type, scale));

  if (comb->rest)
    comb->rest = fold_build2 (PLUS_EXPR, type, comb->rest, elt);
  else
    comb->rest = elt;
}
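
/* For example, adding x * 2 to a combination that already contains
   x * 3 merges the two terms into x * 5; adding x * -3 instead cancels
   the coefficient to zero and removes the element.  Only when ELT is
   new and all MAX_AFF_ELTS slots are taken does the term spill into
   the symbolic REST tree.  */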

/* Adds CST to C.  */

static void
aff_combination_add_cst (aff_tree *c, const widest_int &cst)
{
  c->offset = wide_int_ext_for_comb (c->offset + cst, c->type);
}

/* Adds COMB2 to COMB1.  */

void
aff_combination_add (aff_tree *comb1, aff_tree *comb2)
{
  unsigned i;

  aff_combination_add_cst (comb1, comb2->offset);
  for (i = 0; i < comb2->n; i++)
    aff_combination_add_elt (comb1, comb2->elts[i].val, comb2->elts[i].coef);
  if (comb2->rest)
    aff_combination_add_elt (comb1, comb2->rest, 1);
}
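
/* E.g. adding 3*y - 1 to 2*x + 1 produces 2*x + 3*y: the offsets and
   the per-element coefficients are combined pairwise through
   aff_combination_add_cst and aff_combination_add_elt.  */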

/* Converts affine combination COMB to TYPE.  */

void
aff_combination_convert (aff_tree *comb, tree type)
{
  unsigned i, j;
  tree comb_type = comb->type;

  if (TYPE_PRECISION (type) > TYPE_PRECISION (comb_type))
    {
      tree val = fold_convert (type, aff_combination_to_tree (comb));
      tree_to_aff_combination (val, type, comb);
      return;
    }

  comb->type = type;
  if (comb->rest && !POINTER_TYPE_P (type))
    comb->rest = fold_convert (type, comb->rest);

  if (TYPE_PRECISION (type) == TYPE_PRECISION (comb_type))
    return;

  comb->offset = wide_int_ext_for_comb (comb->offset, comb->type);
  for (i = j = 0; i < comb->n; i++)
    {
      if (comb->elts[i].coef == 0)
        continue;
      comb->elts[j].coef = comb->elts[i].coef;
      comb->elts[j].val = fold_convert (type, comb->elts[i].val);
      j++;
    }

  comb->n = j;
  if (comb->n < MAX_AFF_ELTS && comb->rest)
    {
      comb->elts[comb->n].coef = 1;
      comb->elts[comb->n].val = comb->rest;
      comb->rest = NULL_TREE;
      comb->n++;
    }
}
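
/* For instance, narrowing a combination from a 64-bit to a 32-bit type
   re-extends the offset for the narrower precision (an offset of
   0x100000001 becomes 1) and converts each element's value tree, while
   widening instead rebuilds the combination from the converted tree so
   that no information is invented.  */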

/* Splits EXPR into an affine combination of parts.  */

void
tree_to_aff_combination (tree expr, tree type, aff_tree *comb)
{
  aff_tree tmp;
  enum tree_code code;
  tree cst, core, toffset;
  HOST_WIDE_INT bitpos, bitsize;
  machine_mode mode;
  int unsignedp, reversep, volatilep;

  STRIP_NOPS (expr);

  code = TREE_CODE (expr);
  switch (code)
    {
    case INTEGER_CST:
      aff_combination_const (comb, type, wi::to_widest (expr));
      return;

    case POINTER_PLUS_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
      aff_combination_add (comb, &tmp);
      return;

    case PLUS_EXPR:
    case MINUS_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), type, &tmp);
      if (code == MINUS_EXPR)
        aff_combination_scale (&tmp, -1);
      aff_combination_add (comb, &tmp);
      return;

    case MULT_EXPR:
      cst = TREE_OPERAND (expr, 1);
      if (TREE_CODE (cst) != INTEGER_CST)
        break;
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, wi::to_widest (cst));
      return;

    case NEGATE_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, -1);
      return;

    case BIT_NOT_EXPR:
      /* ~x = -x - 1  */
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, -1);
      aff_combination_add_cst (comb, -1);
      return;

    case ADDR_EXPR:
      /* Handle &MEM[ptr + CST] which is equivalent to POINTER_PLUS_EXPR.  */
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == MEM_REF)
        {
          expr = TREE_OPERAND (expr, 0);
          tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
          tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
          aff_combination_add (comb, &tmp);
          return;
        }
      core = get_inner_reference (TREE_OPERAND (expr, 0), &bitsize, &bitpos,
                                  &toffset, &mode, &unsignedp, &reversep,
                                  &volatilep);
      if (bitpos % BITS_PER_UNIT != 0)
        break;
      aff_combination_const (comb, type, bitpos / BITS_PER_UNIT);
      if (TREE_CODE (core) == MEM_REF)
        {
          aff_combination_add_cst (comb,
                                   wi::to_widest (TREE_OPERAND (core, 1)));
          core = TREE_OPERAND (core, 0);
        }
      else
        core = build_fold_addr_expr (core);

      if (TREE_CODE (core) == ADDR_EXPR)
        aff_combination_add_elt (comb, core, 1);
      else
        {
          tree_to_aff_combination (core, type, &tmp);
          aff_combination_add (comb, &tmp);
        }
      if (toffset)
        {
          tree_to_aff_combination (toffset, type, &tmp);
          aff_combination_add (comb, &tmp);
        }
      return;

    case MEM_REF:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR)
        tree_to_aff_combination (TREE_OPERAND (TREE_OPERAND (expr, 0), 0),
                                 type, comb);
      else if (integer_zerop (TREE_OPERAND (expr, 1)))
        {
          aff_combination_elt (comb, type, expr);
          return;
        }
      else
        aff_combination_elt (comb, type,
                             build2 (MEM_REF, TREE_TYPE (expr),
                                     TREE_OPERAND (expr, 0),
                                     build_int_cst
                                       (TREE_TYPE (TREE_OPERAND (expr, 1)),
                                        0)));
      tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
      aff_combination_add (comb, &tmp);
      return;

    CASE_CONVERT:
      {
        tree otype = TREE_TYPE (expr);
        tree inner = TREE_OPERAND (expr, 0);
        tree itype = TREE_TYPE (inner);
        enum tree_code icode = TREE_CODE (inner);

        /* In principle this is a valid folding, but it isn't necessarily
           an optimization, so do it here and not in fold_unary.  */
        if ((icode == PLUS_EXPR || icode == MINUS_EXPR || icode == MULT_EXPR)
            && TREE_CODE (itype) == INTEGER_TYPE
            && TREE_CODE (otype) == INTEGER_TYPE
            && TYPE_PRECISION (otype) > TYPE_PRECISION (itype))
          {
            tree op0 = TREE_OPERAND (inner, 0), op1 = TREE_OPERAND (inner, 1);

            /* If inner type has undefined overflow behavior, fold conversion
               for below two cases:
                 (T1)(X *+- CST) -> (T1)X *+- (T1)CST
                 (T1)(X + X)     -> (T1)X + (T1)X.  */
            if (TYPE_OVERFLOW_UNDEFINED (itype)
                && (TREE_CODE (op1) == INTEGER_CST
                    || (icode == PLUS_EXPR && operand_equal_p (op0, op1, 0))))
              {
                op0 = fold_convert (otype, op0);
                op1 = fold_convert (otype, op1);
                expr = fold_build2 (icode, otype, op0, op1);
                tree_to_aff_combination (expr, type, comb);
                return;
              }
            wide_int minv, maxv;
            /* If inner type has wrapping overflow behavior, fold conversion
               for below case:
                 (T1)(X - CST) -> (T1)X - (T1)CST
               if X - CST doesn't overflow by range information.  Also handle
               (T1)(X + CST) as (T1)(X - (-CST)).  */
            if (TYPE_UNSIGNED (itype)
                && TYPE_OVERFLOW_WRAPS (itype)
                && TREE_CODE (op0) == SSA_NAME
                && TREE_CODE (op1) == INTEGER_CST
                && icode != MULT_EXPR
                && get_range_info (op0, &minv, &maxv) == VR_RANGE)
              {
                if (icode == PLUS_EXPR)
                  op1 = wide_int_to_tree (itype, -wi::to_wide (op1));
                if (wi::geu_p (minv, wi::to_wide (op1)))
                  {
                    op0 = fold_convert (otype, op0);
                    op1 = fold_convert (otype, op1);
                    expr = fold_build2 (MINUS_EXPR, otype, op0, op1);
                    tree_to_aff_combination (expr, type, comb);
                    return;
                  }
              }
          }
      }
      break;

    default:
      break;
    }

  aff_combination_elt (comb, type, expr);
}
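
/* As a worked example, the expression (x + 3) * 4 - y decomposes into
   the combination 4*x - 1*y + 12: the MULT_EXPR case scales the inner
   combination x + 3 by the constant 4, and the MINUS_EXPR case adds
   the right operand scaled by -1.  */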

/* Creates EXPR + ELT * SCALE in TYPE.  EXPR is taken from affine
   combination COMB.  */

static tree
add_elt_to_tree (tree expr, tree type, tree elt, const widest_int &scale_in)
{
  enum tree_code code;

  widest_int scale = wide_int_ext_for_comb (scale_in, type);

  elt = fold_convert (type, elt);
  if (scale == 1)
    {
      if (!expr)
        return elt;

      return fold_build2 (PLUS_EXPR, type, expr, elt);
    }

  if (scale == -1)
    {
      if (!expr)
        return fold_build1 (NEGATE_EXPR, type, elt);

      return fold_build2 (MINUS_EXPR, type, expr, elt);
    }

  if (!expr)
    return fold_build2 (MULT_EXPR, type, elt, wide_int_to_tree (type, scale));

  if (wi::neg_p (scale))
    {
      code = MINUS_EXPR;
      scale = -scale;
    }
  else
    code = PLUS_EXPR;

  elt = fold_build2 (MULT_EXPR, type, elt, wide_int_to_tree (type, scale));
  return fold_build2 (code, type, expr, elt);
}

/* Makes tree from the affine combination COMB.  */

tree
aff_combination_to_tree (aff_tree *comb)
{
  tree type = comb->type, base = NULL_TREE, expr = NULL_TREE;
  unsigned i;
  widest_int off;
  int sgn;

  gcc_assert (comb->n == MAX_AFF_ELTS || comb->rest == NULL_TREE);

  i = 0;
  if (POINTER_TYPE_P (type))
    {
      type = sizetype;
      if (comb->n > 0 && comb->elts[0].coef == 1
          && POINTER_TYPE_P (TREE_TYPE (comb->elts[0].val)))
        {
          base = comb->elts[0].val;
          ++i;
        }
    }

  for (; i < comb->n; i++)
    expr = add_elt_to_tree (expr, type, comb->elts[i].val, comb->elts[i].coef);

  if (comb->rest)
    expr = add_elt_to_tree (expr, type, comb->rest, 1);

  /* Ensure that we get x - 1, not x + (-1) or x + 0xff..f if x is
     unsigned.  */
  if (wi::neg_p (comb->offset))
    {
      off = -comb->offset;
      sgn = -1;
    }
  else
    {
      off = comb->offset;
      sgn = 1;
    }
  expr = add_elt_to_tree (expr, type, wide_int_to_tree (type, off), sgn);

  if (base)
    return fold_build_pointer_plus (base, expr);
  else
    return fold_convert (comb->type, expr);
}
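
/* A typical round trip, with hypothetical variables, looks like

     aff_tree aff;
     tree_to_aff_combination (expr, type, &aff);
     ... manipulate AFF ...
     expr = aff_combination_to_tree (&aff);

   which re-materializes the combination as a folded tree, emitting
   x - 1 rather than x + 0xff..f for a negative offset.  */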

/* Copies the tree elements of COMB to ensure that they are not shared.  */

void
unshare_aff_combination (aff_tree *comb)
{
  unsigned i;

  for (i = 0; i < comb->n; i++)
    comb->elts[i].val = unshare_expr (comb->elts[i].val);
  if (comb->rest)
    comb->rest = unshare_expr (comb->rest);
}

/* Remove M-th element from COMB.  */

void
aff_combination_remove_elt (aff_tree *comb, unsigned m)
{
  comb->n--;
  if (comb->n > m)
    comb->elts[m] = comb->elts[comb->n];
  if (comb->rest)
    {
      comb->elts[comb->n].coef = 1;
      comb->elts[comb->n].val = comb->rest;
      comb->rest = NULL_TREE;
      comb->n++;
    }
}

/* Adds C * COEF * VAL to R.  VAL may be NULL, in that case only
   C * COEF is added to R.  */

static void
aff_combination_add_product (aff_tree *c, const widest_int &coef, tree val,
                             aff_tree *r)
{
  unsigned i;
  tree aval, type;

  for (i = 0; i < c->n; i++)
    {
      aval = c->elts[i].val;
      if (val)
        {
          type = TREE_TYPE (aval);
          aval = fold_build2 (MULT_EXPR, type, aval,
                              fold_convert (type, val));
        }

      aff_combination_add_elt (r, aval, coef * c->elts[i].coef);
    }

  if (c->rest)
    {
      aval = c->rest;
      if (val)
        {
          type = TREE_TYPE (aval);
          aval = fold_build2 (MULT_EXPR, type, aval,
                              fold_convert (type, val));
        }

      aff_combination_add_elt (r, aval, coef);
    }

  if (val)
    aff_combination_add_elt (r, val, coef * c->offset);
  else
    aff_combination_add_cst (r, coef * c->offset);
}

/* Multiplies C1 by C2, storing the result to R.  */

void
aff_combination_mult (aff_tree *c1, aff_tree *c2, aff_tree *r)
{
  unsigned i;

  gcc_assert (TYPE_PRECISION (c1->type) == TYPE_PRECISION (c2->type));

  aff_combination_zero (r, c1->type);

  for (i = 0; i < c2->n; i++)
    aff_combination_add_product (c1, c2->elts[i].coef, c2->elts[i].val, r);
  if (c2->rest)
    aff_combination_add_product (c1, 1, c2->rest, r);
  aff_combination_add_product (c1, c2->offset, NULL, r);
}
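
/* For example, multiplying the combinations x + 1 and y + 2 gives
   x*y + 2*x + y + 2: each term of C2 is distributed over C1 by
   aff_combination_add_product, with the product x*y entering R as a
   single new element whose value is the MULT_EXPR tree.  */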

/* Returns the element of COMB whose value is VAL, or NULL if no such
   element exists.  If IDX is not NULL, it is set to the index of VAL in
   COMB.  */

static struct aff_comb_elt *
aff_combination_find_elt (aff_tree *comb, tree val, unsigned *idx)
{
  unsigned i;

  for (i = 0; i < comb->n; i++)
    if (operand_equal_p (comb->elts[i].val, val, 0))
      {
        if (idx)
          *idx = i;

        return &comb->elts[i];
      }

  return NULL;
}

/* Element of the cache that maps ssa name NAME to its expanded form
   as an affine expression EXPANSION.  */

struct name_expansion
{
  aff_tree expansion;

  /* True if the expansion for the name is just being generated.  */
  unsigned in_progress : 1;
};

/* Expands SSA names in COMB recursively.  CACHE is used to cache the
   results.  */

void
aff_combination_expand (aff_tree *comb ATTRIBUTE_UNUSED,
                        hash_map<tree, name_expansion *> **cache)
{
  unsigned i;
  aff_tree to_add, current, curre;
  tree e, rhs;
  gimple *def;
  widest_int scale;
  struct name_expansion *exp;

  aff_combination_zero (&to_add, comb->type);
  for (i = 0; i < comb->n; i++)
    {
      tree type, name;
      enum tree_code code;

      e = comb->elts[i].val;
      type = TREE_TYPE (e);
      name = e;
      /* Look through some conversions.  */
      if (CONVERT_EXPR_P (e)
          && (TYPE_PRECISION (type)
              >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (e, 0)))))
        name = TREE_OPERAND (e, 0);
      if (TREE_CODE (name) != SSA_NAME)
        continue;
      def = SSA_NAME_DEF_STMT (name);
      if (!is_gimple_assign (def) || gimple_assign_lhs (def) != name)
        continue;

      code = gimple_assign_rhs_code (def);
      if (code != SSA_NAME
          && !IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
          && (get_gimple_rhs_class (code) != GIMPLE_SINGLE_RHS
              || !is_gimple_min_invariant (gimple_assign_rhs1 (def))))
        continue;

      /* We do not know whether the reference retains its value at the
         place where the expansion is used.  */
      if (TREE_CODE_CLASS (code) == tcc_reference)
        continue;

      if (!*cache)
        *cache = new hash_map<tree, name_expansion *>;
      name_expansion **slot = &(*cache)->get_or_insert (e);
      exp = *slot;

      if (!exp)
        {
          exp = XNEW (struct name_expansion);
          exp->in_progress = 1;
          *slot = exp;
          rhs = gimple_assign_rhs_to_tree (def);
          if (e != name)
            rhs = fold_convert (type, rhs);

          tree_to_aff_combination_expand (rhs, comb->type, &current, cache);
          exp->expansion = current;
          exp->in_progress = 0;
        }
      else
        {
          /* Since we follow the definitions in the SSA form, we should not
             enter a cycle unless we pass through a phi node.  */
          gcc_assert (!exp->in_progress);
          current = exp->expansion;
        }

      /* Accumulate the new terms to TO_ADD, so that we do not modify
         COMB while traversing it; include the term -coef * E, to remove
         it from COMB.  */
      scale = comb->elts[i].coef;
      aff_combination_zero (&curre, comb->type);
      aff_combination_add_elt (&curre, e, -scale);
      aff_combination_scale (&current, scale);
      aff_combination_add (&to_add, &current);
      aff_combination_add (&to_add, &curre);
    }
  aff_combination_add (comb, &to_add);
}

/* Similar to tree_to_aff_combination, but follows SSA name definitions
   and expands them recursively.  CACHE is used to cache the expansions
   of the ssa names, to avoid exponential time complexity for cases
   like

   x1 = a + a;
   x2 = x1 + x1;
   x3 = x2 + x2;
   ...  */

void
tree_to_aff_combination_expand (tree expr, tree type, aff_tree *comb,
                                hash_map<tree, name_expansion *> **cache)
{
  tree_to_aff_combination (expr, type, comb);
  aff_combination_expand (comb, cache);
}
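
/* A sketch of the intended use, with hypothetical variables:

     hash_map<tree, name_expansion *> *cache = NULL;
     aff_tree aff1, aff2;
     tree_to_aff_combination_expand (addr1, type, &aff1, &cache);
     tree_to_aff_combination_expand (addr2, type, &aff2, &cache);
     ... compare AFF1 and AFF2 ...
     free_affine_expand_cache (&cache);

   Sharing one cache across related queries is what keeps repeated
   expansion of the same SSA names cheap.  */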

/* Frees memory occupied by struct name_expansion in *VALUE.  Callback for
   hash_map::traverse.  */

bool
free_name_expansion (tree const &, name_expansion **value, void *)
{
  free (*value);
  return true;
}

/* Frees memory allocated for the CACHE used by
   tree_to_aff_combination_expand.  */

void
free_affine_expand_cache (hash_map<tree, name_expansion *> **cache)
{
  if (!*cache)
    return;

  (*cache)->traverse<void *, free_name_expansion> (NULL);
  delete *cache;
  *cache = NULL;
}

/* If VAL != CST * DIV for any constant CST, returns false.
   Otherwise, if *MULT_SET is true, additionally compares CST and MULT,
   and if they are different, returns false.  Finally, if neither of these
   two cases occur, true is returned, and CST is stored to MULT and MULT_SET
   is set to true.  */

static bool
wide_int_constant_multiple_p (const widest_int &val, const widest_int &div,
                              bool *mult_set, widest_int *mult)
{
  widest_int cst;

  if (val == 0)
    {
      if (*mult_set && *mult != 0)
        return false;
      *mult_set = true;
      *mult = 0;
      return true;
    }

  if (div == 0)
    return false;

  if (!wi::multiple_of_p (val, div, SIGNED, &cst))
    return false;

  if (*mult_set && *mult != cst)
    return false;

  *mult_set = true;
  *mult = cst;
  return true;
}

/* Returns true if VAL = X * DIV for some constant X.  If this is the case,
   X is stored to MULT.  */

bool
aff_combination_constant_multiple_p (aff_tree *val, aff_tree *div,
                                     widest_int *mult)
{
  bool mult_set = false;
  unsigned i;

  if (val->n == 0 && val->offset == 0)
    {
      *mult = 0;
      return true;
    }

  if (val->n != div->n)
    return false;

  if (val->rest || div->rest)
    return false;

  if (!wide_int_constant_multiple_p (val->offset, div->offset,
                                     &mult_set, mult))
    return false;

  for (i = 0; i < div->n; i++)
    {
      struct aff_comb_elt *elt
        = aff_combination_find_elt (val, div->elts[i].val, NULL);
      if (!elt)
        return false;
      if (!wide_int_constant_multiple_p (elt->coef, div->elts[i].coef,
                                         &mult_set, mult))
        return false;
    }

  gcc_assert (mult_set);
  return true;
}
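
/* As a worked example, VAL = 6*x + 9 and DIV = 2*x + 3 yield MULT = 3:
   the offsets give 9 = 3 * 3, the coefficients of x give 6 = 3 * 2, and
   since every pairwise quotient agrees, the whole combination is a
   constant multiple.  A mismatch in any quotient, or an element of DIV
   missing from VAL, makes the function return false.  */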

/* Prints the affine VAL to the FILE.  */

static void
print_aff (FILE *file, aff_tree *val)
{
  unsigned i;
  signop sgn = TYPE_SIGN (val->type);
  if (POINTER_TYPE_P (val->type))
    sgn = SIGNED;
  fprintf (file, "{\n  type = ");
  print_generic_expr (file, val->type, TDF_VOPS|TDF_MEMSYMS);
  fprintf (file, "\n  offset = ");
  print_dec (val->offset, file, sgn);
  if (val->n > 0)
    {
      fprintf (file, "\n  elements = {\n");
      for (i = 0; i < val->n; i++)
        {
          fprintf (file, "    [%d] = ", i);
          print_generic_expr (file, val->elts[i].val, TDF_VOPS|TDF_MEMSYMS);

          fprintf (file, " * ");
          print_dec (val->elts[i].coef, file, sgn);
          if (i != val->n - 1)
            fprintf (file, ", \n");
        }
      fprintf (file, "\n  }");
    }
  if (val->rest)
    {
      fprintf (file, "\n  rest = ");
      print_generic_expr (file, val->rest, TDF_VOPS|TDF_MEMSYMS);
    }
  fprintf (file, "\n}");
}

/* Prints the affine VAL to the standard error, used for debugging.  */

DEBUG_FUNCTION void
debug_aff (aff_tree *val)
{
  print_aff (stderr, val);
  fprintf (stderr, "\n");
}

/* Computes address of the reference REF in ADDR.  The size of the accessed
   location is stored to SIZE.  Returns the ultimate containing object to
   which REF refers.  */

tree
get_inner_reference_aff (tree ref, aff_tree *addr, widest_int *size)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree toff;
  machine_mode mode;
  int uns, rev, vol;
  aff_tree tmp;
  tree base = get_inner_reference (ref, &bitsize, &bitpos, &toff, &mode,
                                   &uns, &rev, &vol);
  tree base_addr = build_fold_addr_expr (base);

  /* ADDR = &BASE + TOFF + BITPOS / BITS_PER_UNIT.  */

  tree_to_aff_combination (base_addr, sizetype, addr);

  if (toff)
    {
      tree_to_aff_combination (toff, sizetype, &tmp);
      aff_combination_add (addr, &tmp);
    }

  aff_combination_const (&tmp, sizetype, bitpos / BITS_PER_UNIT);
  aff_combination_add (addr, &tmp);

  *size = (bitsize + BITS_PER_UNIT - 1) / BITS_PER_UNIT;

  return base;
}
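
/* For a reference such as a.b (a hypothetical struct access), ADDR
   evaluates to &a plus the byte offset of field b, and SIZE receives
   the byte size of the access, rounded up from BITSIZE.  */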

/* Returns true if a region of size SIZE1 at position 0 and a region of
   size SIZE2 at position DIFF cannot overlap.  */

bool
aff_comb_cannot_overlap_p (aff_tree *diff, const widest_int &size1,
                           const widest_int &size2)
{
  /* Unless the difference is a constant, we fail.  */
  if (diff->n != 0)
    return false;

  if (wi::neg_p (diff->offset))
    {
      /* The second object is before the first one, we succeed if the last
         element of the second object is before the start of the first one.  */
      return wi::neg_p (diff->offset + size2 - 1);
    }
  else
    {
      /* We succeed if the second object starts after the first one ends.  */
      return size1 <= diff->offset;
    }
}
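
/* A quick numeric check: with SIZE1 = 4, SIZE2 = 2 and a constant DIFF
   of 4, the second region occupies bytes [4, 5] while the first
   occupies [0, 3], so size1 <= diff->offset holds and the regions
   cannot overlap; with DIFF = -2 the second region occupies [-2, -1]
   and the wi::neg_p test on diff->offset + size2 - 1 likewise
   succeeds.  */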