/* Operations with affine combinations of trees.
   Copyright (C) 2005-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "expr.h"
#include "tree-pretty-print.h"
#include "pointer-set.h"
#include "tree-affine.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimplify.h"
#include "flags.h"
#include "dumpfile.h"
#include "cfgexpand.h"
#include "wide-int-print.h"
/* Extends CST as appropriate for the affine combinations COMB.  */

static widest_int
wide_int_ext_for_comb (const widest_int &cst, aff_tree *comb)
{
  return wi::sext (cst, TYPE_PRECISION (comb->type));
}
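
/* For example, if COMB->type has 8-bit precision, the constant 255 is
   reduced to -1 here, so all arithmetic on offsets and coefficients
   behaves as signed arithmetic modulo 2^8.  */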

/* Initializes affine combination COMB so that its value is zero in TYPE.  */

static void
aff_combination_zero (aff_tree *comb, tree type)
{
  int i;
  comb->type = type;
  comb->offset = 0;
  comb->n = 0;
  for (i = 0; i < MAX_AFF_ELTS; i++)
    comb->elts[i].coef = 0;
  comb->rest = NULL_TREE;
}

/* Sets COMB to CST.  */

void
aff_combination_const (aff_tree *comb, tree type, const widest_int &cst)
{
  aff_combination_zero (comb, type);
  comb->offset = wide_int_ext_for_comb (cst, comb);
}

/* Sets COMB to single element ELT.  */

void
aff_combination_elt (aff_tree *comb, tree type, tree elt)
{
  aff_combination_zero (comb, type);

  comb->n = 1;
  comb->elts[0].val = elt;
  comb->elts[0].coef = 1;
}

/* Scales COMB by SCALE.  */

void
aff_combination_scale (aff_tree *comb, const widest_int &scale_in)
{
  unsigned i, j;

  widest_int scale = wide_int_ext_for_comb (scale_in, comb);
  if (scale == 1)
    return;

  if (scale == 0)
    {
      aff_combination_zero (comb, comb->type);
      return;
    }

  comb->offset = wide_int_ext_for_comb (scale * comb->offset, comb);
  for (i = 0, j = 0; i < comb->n; i++)
    {
      widest_int new_coef
	= wide_int_ext_for_comb (scale * comb->elts[i].coef, comb);
      /* A coefficient may become zero due to overflow.  Remove the zero
	 elements.  */
      if (new_coef == 0)
	continue;
      comb->elts[j].coef = new_coef;
      comb->elts[j].val = comb->elts[i].val;
      j++;
    }
  comb->n = j;

  if (comb->rest)
    {
      tree type = comb->type;
      if (POINTER_TYPE_P (type))
	type = sizetype;
      if (comb->n < MAX_AFF_ELTS)
	{
	  comb->elts[comb->n].coef = scale;
	  comb->elts[comb->n].val = comb->rest;
	  comb->rest = NULL_TREE;
	  comb->n++;
	}
      else
	comb->rest = fold_build2 (MULT_EXPR, type, comb->rest,
				  wide_int_to_tree (type, scale));
    }
}
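
/* For example, scaling the combination 1 + 2*x by 3 yields 3 + 6*x,
   and scaling by 0 zeroes the combination entirely.  A coefficient
   that wraps around to 0 in the precision of COMB->type is dropped
   from the element list, as noted above.  */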

/* Adds ELT * SCALE to COMB.  */

void
aff_combination_add_elt (aff_tree *comb, tree elt, const widest_int &scale_in)
{
  unsigned i;
  tree type;

  widest_int scale = wide_int_ext_for_comb (scale_in, comb);
  if (scale == 0)
    return;

  for (i = 0; i < comb->n; i++)
    if (operand_equal_p (comb->elts[i].val, elt, 0))
      {
	widest_int new_coef
	  = wide_int_ext_for_comb (comb->elts[i].coef + scale, comb);
	if (new_coef != 0)
	  {
	    comb->elts[i].coef = new_coef;
	    return;
	  }

	comb->n--;
	comb->elts[i] = comb->elts[comb->n];

	if (comb->rest)
	  {
	    gcc_assert (comb->n == MAX_AFF_ELTS - 1);
	    comb->elts[comb->n].coef = 1;
	    comb->elts[comb->n].val = comb->rest;
	    comb->rest = NULL_TREE;
	    comb->n++;
	  }
	return;
      }
  if (comb->n < MAX_AFF_ELTS)
    {
      comb->elts[comb->n].coef = scale;
      comb->elts[comb->n].val = elt;
      comb->n++;
      return;
    }

  type = comb->type;
  if (POINTER_TYPE_P (type))
    type = sizetype;

  if (scale == 1)
    elt = fold_convert (type, elt);
  else
    elt = fold_build2 (MULT_EXPR, type,
		       fold_convert (type, elt),
		       wide_int_to_tree (type, scale));

  if (comb->rest)
    comb->rest = fold_build2 (PLUS_EXPR, type, comb->rest,
			      elt);
  else
    comb->rest = elt;
}
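
/* For example, adding the element x with scale -2 to a combination
   that already contains 2*x cancels the term: the element is removed,
   and a REST expression that was previously displaced into the last
   slot is moved back into the freed element.  */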

/* Adds CST to C.  */

static void
aff_combination_add_cst (aff_tree *c, const widest_int &cst)
{
  c->offset = wide_int_ext_for_comb (c->offset + cst, c);
}

/* Adds COMB2 to COMB1.  */

void
aff_combination_add (aff_tree *comb1, aff_tree *comb2)
{
  unsigned i;

  aff_combination_add_cst (comb1, comb2->offset);
  for (i = 0; i < comb2->n; i++)
    aff_combination_add_elt (comb1, comb2->elts[i].val, comb2->elts[i].coef);
  if (comb2->rest)
    aff_combination_add_elt (comb1, comb2->rest, 1);
}

/* Converts affine combination COMB to TYPE.  */

void
aff_combination_convert (aff_tree *comb, tree type)
{
  unsigned i, j;
  tree comb_type = comb->type;

  if (TYPE_PRECISION (type) > TYPE_PRECISION (comb_type))
    {
      tree val = fold_convert (type, aff_combination_to_tree (comb));
      tree_to_aff_combination (val, type, comb);
      return;
    }

  comb->type = type;
  if (comb->rest && !POINTER_TYPE_P (type))
    comb->rest = fold_convert (type, comb->rest);

  if (TYPE_PRECISION (type) == TYPE_PRECISION (comb_type))
    return;

  comb->offset = wide_int_ext_for_comb (comb->offset, comb);
  for (i = j = 0; i < comb->n; i++)
    {
      if (comb->elts[i].coef == 0)
	continue;
      comb->elts[j].coef = comb->elts[i].coef;
      comb->elts[j].val = fold_convert (type, comb->elts[i].val);
      j++;
    }

  comb->n = j;
  if (comb->n < MAX_AFF_ELTS && comb->rest)
    {
      comb->elts[comb->n].coef = 1;
      comb->elts[comb->n].val = comb->rest;
      comb->rest = NULL_TREE;
      comb->n++;
    }
}
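
/* For example, converting a combination with offset 257 to a type of
   8-bit precision leaves offset 1, since the constants are re-extended
   to the new precision; elements with a zero coefficient are dropped
   in the same pass.  */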

/* Splits EXPR into an affine combination of parts.  */

void
tree_to_aff_combination (tree expr, tree type, aff_tree *comb)
{
  aff_tree tmp;
  enum tree_code code;
  tree cst, core, toffset;
  HOST_WIDE_INT bitpos, bitsize;
  enum machine_mode mode;
  int unsignedp, volatilep;

  STRIP_NOPS (expr);

  code = TREE_CODE (expr);
  switch (code)
    {
    case INTEGER_CST:
      aff_combination_const (comb, type, wi::to_widest (expr));
      return;

    case POINTER_PLUS_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
      aff_combination_add (comb, &tmp);
      return;

    case PLUS_EXPR:
    case MINUS_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), type, &tmp);
      if (code == MINUS_EXPR)
	aff_combination_scale (&tmp, -1);
      aff_combination_add (comb, &tmp);
      return;

    case MULT_EXPR:
      cst = TREE_OPERAND (expr, 1);
      if (TREE_CODE (cst) != INTEGER_CST)
	break;
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, wi::to_widest (cst));
      return;

    case NEGATE_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, -1);
      return;

    case BIT_NOT_EXPR:
      /* ~x = -x - 1 */
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, -1);
      aff_combination_add_cst (comb, -1);
      return;

    case ADDR_EXPR:
      /* Handle &MEM[ptr + CST] which is equivalent to POINTER_PLUS_EXPR.  */
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == MEM_REF)
	{
	  expr = TREE_OPERAND (expr, 0);
	  tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
	  tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
	  aff_combination_add (comb, &tmp);
	  return;
	}
      core = get_inner_reference (TREE_OPERAND (expr, 0), &bitsize, &bitpos,
				  &toffset, &mode, &unsignedp, &volatilep,
				  false);
      if (bitpos % BITS_PER_UNIT != 0)
	break;
      aff_combination_const (comb, type, bitpos / BITS_PER_UNIT);
      if (TREE_CODE (core) == MEM_REF)
	{
	  aff_combination_add_cst (comb, wi::to_widest (TREE_OPERAND (core, 1)));
	  core = TREE_OPERAND (core, 0);
	}
      else
	core = build_fold_addr_expr (core);

      if (TREE_CODE (core) == ADDR_EXPR)
	aff_combination_add_elt (comb, core, 1);
      else
	{
	  tree_to_aff_combination (core, type, &tmp);
	  aff_combination_add (comb, &tmp);
	}
      if (toffset)
	{
	  tree_to_aff_combination (toffset, type, &tmp);
	  aff_combination_add (comb, &tmp);
	}
      return;

    case MEM_REF:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR)
	tree_to_aff_combination (TREE_OPERAND (TREE_OPERAND (expr, 0), 0),
				 type, comb);
      else if (integer_zerop (TREE_OPERAND (expr, 1)))
	{
	  aff_combination_elt (comb, type, expr);
	  return;
	}
      else
	aff_combination_elt (comb, type,
			     build2 (MEM_REF, TREE_TYPE (expr),
				     TREE_OPERAND (expr, 0),
				     build_int_cst
				       (TREE_TYPE (TREE_OPERAND (expr, 1)), 0)));
      tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
      aff_combination_add (comb, &tmp);
      return;

    default:
      break;
    }

  aff_combination_elt (comb, type, expr);
}
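
/* A sketch of the decomposition (assuming both variable terms fit into
   the MAX_AFF_ELTS elements): for the expression a + 4 * b - 7 in an
   integral TYPE, the resulting combination is

     offset = -7,  elts = { {a, 1}, {b, 4} },  rest = NULL_TREE.  */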

/* Creates EXPR + ELT * SCALE in TYPE.  EXPR is taken from affine
   combination COMB.  */

static tree
add_elt_to_tree (tree expr, tree type, tree elt, const widest_int &scale_in,
		 aff_tree *comb ATTRIBUTE_UNUSED)
{
  enum tree_code code;
  tree type1 = type;
  if (POINTER_TYPE_P (type))
    type1 = sizetype;

  widest_int scale = wide_int_ext_for_comb (scale_in, comb);

  if (scale == -1
      && POINTER_TYPE_P (TREE_TYPE (elt)))
    {
      elt = convert_to_ptrofftype (elt);
      elt = fold_build1 (NEGATE_EXPR, TREE_TYPE (elt), elt);
      scale = 1;
    }

  if (scale == 1)
    {
      if (!expr)
	{
	  if (POINTER_TYPE_P (TREE_TYPE (elt)))
	    return elt;
	  else
	    return fold_convert (type1, elt);
	}

      if (POINTER_TYPE_P (TREE_TYPE (expr)))
	return fold_build_pointer_plus (expr, elt);
      if (POINTER_TYPE_P (TREE_TYPE (elt)))
	return fold_build_pointer_plus (elt, expr);
      return fold_build2 (PLUS_EXPR, type1,
			  expr, fold_convert (type1, elt));
    }

  if (scale == -1)
    {
      if (!expr)
	return fold_build1 (NEGATE_EXPR, type1,
			    fold_convert (type1, elt));

      if (POINTER_TYPE_P (TREE_TYPE (expr)))
	{
	  elt = convert_to_ptrofftype (elt);
	  elt = fold_build1 (NEGATE_EXPR, TREE_TYPE (elt), elt);
	  return fold_build_pointer_plus (expr, elt);
	}
      return fold_build2 (MINUS_EXPR, type1,
			  expr, fold_convert (type1, elt));
    }

  elt = fold_convert (type1, elt);
  if (!expr)
    return fold_build2 (MULT_EXPR, type1, elt,
			wide_int_to_tree (type1, scale));

  if (wi::neg_p (scale))
    {
      code = MINUS_EXPR;
      scale = -scale;
    }
  else
    code = PLUS_EXPR;

  elt = fold_build2 (MULT_EXPR, type1, elt,
		     wide_int_to_tree (type1, scale));
  if (POINTER_TYPE_P (TREE_TYPE (expr)))
    {
      if (code == MINUS_EXPR)
	elt = fold_build1 (NEGATE_EXPR, type1, elt);
      return fold_build_pointer_plus (expr, elt);
    }
  return fold_build2 (code, type1, expr, elt);
}

/* Makes tree from the affine combination COMB.  */

tree
aff_combination_to_tree (aff_tree *comb)
{
  tree type = comb->type;
  tree expr = NULL_TREE;
  unsigned i;
  widest_int off, sgn;
  tree type1 = type;
  if (POINTER_TYPE_P (type))
    type1 = sizetype;

  gcc_assert (comb->n == MAX_AFF_ELTS || comb->rest == NULL_TREE);

  for (i = 0; i < comb->n; i++)
    expr = add_elt_to_tree (expr, type, comb->elts[i].val, comb->elts[i].coef,
			    comb);

  if (comb->rest)
    expr = add_elt_to_tree (expr, type, comb->rest, 1, comb);

  /* Ensure that we get x - 1, not x + (-1) or x + 0xff..f if x is
     unsigned.  */
  if (wi::neg_p (comb->offset))
    {
      off = -comb->offset;
      sgn = -1;
    }
  else
    {
      off = comb->offset;
      sgn = 1;
    }
  return add_elt_to_tree (expr, type, wide_int_to_tree (type1, off), sgn,
			  comb);
}
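
/* aff_combination_to_tree is the approximate inverse of
   tree_to_aff_combination: rebuilding the combination sketched above
   yields a tree equivalent to a + 4 * b - 7.  The offset handling
   ensures we emit x - 7 rather than x + 0xff..f9 when the type is
   unsigned.  */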

/* Copies the tree elements of COMB to ensure that they are not shared.  */

void
unshare_aff_combination (aff_tree *comb)
{
  unsigned i;

  for (i = 0; i < comb->n; i++)
    comb->elts[i].val = unshare_expr (comb->elts[i].val);
  if (comb->rest)
    comb->rest = unshare_expr (comb->rest);
}

/* Remove M-th element from COMB.  */

void
aff_combination_remove_elt (aff_tree *comb, unsigned m)
{
  comb->n--;
  if (m <= comb->n)
    comb->elts[m] = comb->elts[comb->n];
  if (comb->rest)
    {
      comb->elts[comb->n].coef = 1;
      comb->elts[comb->n].val = comb->rest;
      comb->rest = NULL_TREE;
      comb->n++;
    }
}

/* Adds C * COEF * VAL to R.  VAL may be NULL, in that case only
   C * COEF is added to R.  */

static void
aff_combination_add_product (aff_tree *c, const widest_int &coef, tree val,
			     aff_tree *r)
{
  unsigned i;
  tree aval, type;

  for (i = 0; i < c->n; i++)
    {
      aval = c->elts[i].val;
      if (val)
	{
	  type = TREE_TYPE (aval);
	  aval = fold_build2 (MULT_EXPR, type, aval,
			      fold_convert (type, val));
	}

      aff_combination_add_elt (r, aval, coef * c->elts[i].coef);
    }

  if (c->rest)
    {
      aval = c->rest;
      if (val)
	{
	  type = TREE_TYPE (aval);
	  aval = fold_build2 (MULT_EXPR, type, aval,
			      fold_convert (type, val));
	}

      aff_combination_add_elt (r, aval, coef);
    }

  if (val)
    aff_combination_add_elt (r, val, coef * c->offset);
  else
    aff_combination_add_cst (r, coef * c->offset);
}

/* Multiplies C1 by C2, storing the result to R.  */

void
aff_combination_mult (aff_tree *c1, aff_tree *c2, aff_tree *r)
{
  unsigned i;
  gcc_assert (TYPE_PRECISION (c1->type) == TYPE_PRECISION (c2->type));

  aff_combination_zero (r, c1->type);

  for (i = 0; i < c2->n; i++)
    aff_combination_add_product (c1, c2->elts[i].coef, c2->elts[i].val, r);
  if (c2->rest)
    aff_combination_add_product (c1, 1, c2->rest, r);
  aff_combination_add_product (c1, c2->offset, NULL, r);
}
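
/* Worked example: multiplying x + 1 by x + 2 yields x * x + 3 * x + 2;
   the quadratic term is built as a MULT_EXPR tree and tracked as a
   single new element.  */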

/* Returns the element of COMB whose value is VAL, or NULL if no such
   element exists.  If IDX is not NULL, it is set to the index of VAL in
   COMB.  */

static struct aff_comb_elt *
aff_combination_find_elt (aff_tree *comb, tree val, unsigned *idx)
{
  unsigned i;

  for (i = 0; i < comb->n; i++)
    if (operand_equal_p (comb->elts[i].val, val, 0))
      {
	if (idx)
	  *idx = i;

	return &comb->elts[i];
      }

  return NULL;
}

/* Element of the cache that maps ssa name NAME to its expanded form
   as an affine expression EXPANSION.  */

struct name_expansion
{
  aff_tree expansion;

  /* True if the expansion for the name is just being generated.  */
  unsigned in_progress : 1;
};

/* Expands SSA names in COMB recursively.  CACHE is used to cache the
   results.  */

void
aff_combination_expand (aff_tree *comb ATTRIBUTE_UNUSED,
			struct pointer_map_t **cache ATTRIBUTE_UNUSED)
{
  unsigned i;
  aff_tree to_add, current, curre;
  tree e, rhs;
  gimple def;
  widest_int scale;
  void **slot;
  struct name_expansion *exp;

  aff_combination_zero (&to_add, comb->type);
  for (i = 0; i < comb->n; i++)
    {
      tree type, name;
      enum tree_code code;

      e = comb->elts[i].val;
      type = TREE_TYPE (e);
      name = e;
      /* Look through some conversions.  */
      if (TREE_CODE (e) == NOP_EXPR
	  && (TYPE_PRECISION (type)
	      >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (e, 0)))))
	name = TREE_OPERAND (e, 0);
      if (TREE_CODE (name) != SSA_NAME)
	continue;
      def = SSA_NAME_DEF_STMT (name);
      if (!is_gimple_assign (def) || gimple_assign_lhs (def) != name)
	continue;

      code = gimple_assign_rhs_code (def);
      if (code != SSA_NAME
	  && !IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
	  && (get_gimple_rhs_class (code) != GIMPLE_SINGLE_RHS
	      || !is_gimple_min_invariant (gimple_assign_rhs1 (def))))
	continue;

      /* We do not know whether the reference retains its value at the
	 place where the expansion is used.  */
      if (TREE_CODE_CLASS (code) == tcc_reference)
	continue;

      if (!*cache)
	*cache = pointer_map_create ();
      slot = pointer_map_insert (*cache, e);
      exp = (struct name_expansion *) *slot;

      if (!exp)
	{
	  exp = XNEW (struct name_expansion);
	  exp->in_progress = 1;
	  *slot = exp;
	  /* In principle this is a generally valid folding, but
	     it is not unconditionally an optimization, so do it
	     here and not in fold_unary.  */
	  /* Convert (T1)(X *+- CST) into (T1)X *+- (T1)CST if T1 is wider
	     than the type of X and overflow for the type of X is
	     undefined.  */
	  if (e != name
	      && INTEGRAL_TYPE_P (type)
	      && INTEGRAL_TYPE_P (TREE_TYPE (name))
	      && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (name))
	      && TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (name))
	      && (code == PLUS_EXPR || code == MINUS_EXPR || code == MULT_EXPR)
	      && TREE_CODE (gimple_assign_rhs2 (def)) == INTEGER_CST)
	    rhs = fold_build2 (code, type,
			       fold_convert (type, gimple_assign_rhs1 (def)),
			       fold_convert (type, gimple_assign_rhs2 (def)));
	  else
	    {
	      rhs = gimple_assign_rhs_to_tree (def);
	      if (e != name)
		rhs = fold_convert (type, rhs);
	    }
	  tree_to_aff_combination_expand (rhs, comb->type, &current, cache);
	  exp->expansion = current;
	  exp->in_progress = 0;
	}
      else
	{
	  /* Since we follow the definitions in the SSA form, we should not
	     enter a cycle unless we pass through a phi node.  */
	  gcc_assert (!exp->in_progress);
	  current = exp->expansion;
	}

      /* Accumulate the new terms to TO_ADD, so that we do not modify
	 COMB while traversing it; include the term -coef * E, to remove
	 it from COMB.  */
      scale = comb->elts[i].coef;
      aff_combination_zero (&curre, comb->type);
      aff_combination_add_elt (&curre, e, -scale);
      aff_combination_scale (&current, scale);
      aff_combination_add (&to_add, &current);
      aff_combination_add (&to_add, &curre);
    }
  aff_combination_add (comb, &to_add);
}
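
/* Illustration: if COMB contains the term 2 * n_1 and n_1 is defined
   by n_1 = m_2 + 1, the expansion accumulates 2 * m_2 + 2 together
   with the cancelling term -2 * n_1 in TO_ADD, and applies both at
   the end.  */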

/* Similar to tree_to_aff_combination, but follows SSA name definitions
   and expands them recursively.  CACHE is used to cache the expansions
   of the ssa names, to avoid exponential time complexity for cases
   like

   x1 = a + a;
   x2 = x1 + x1;
   x3 = x2 + x2;
   ...  */

void
tree_to_aff_combination_expand (tree expr, tree type, aff_tree *comb,
				struct pointer_map_t **cache)
{
  tree_to_aff_combination (expr, type, comb);
  aff_combination_expand (comb, cache);
}

/* Frees memory occupied by struct name_expansion in *VALUE.  Callback for
   pointer_map_traverse.  */

static bool
free_name_expansion (const void *key ATTRIBUTE_UNUSED, void **value,
		     void *data ATTRIBUTE_UNUSED)
{
  struct name_expansion *const exp = (struct name_expansion *) *value;
  free (exp);

  return true;
}

/* Frees memory allocated for the CACHE used by
   tree_to_aff_combination_expand.  */

void
free_affine_expand_cache (struct pointer_map_t **cache)
{
  if (!*cache)
    return;

  pointer_map_traverse (*cache, free_name_expansion, NULL);
  pointer_map_destroy (*cache);
  *cache = NULL;
}

/* If VAL != CST * DIV for any constant CST, returns false.
   Otherwise, if *MULT_SET is true, additionally compares CST and MULT,
   and if they are different, returns false.  Finally, if neither of these
   two cases occur, true is returned, and CST is stored to MULT and MULT_SET
   is set to true.  */

static bool
wide_int_constant_multiple_p (const widest_int &val, const widest_int &div,
			      bool *mult_set, widest_int *mult)
{
  widest_int cst;

  if (val == 0)
    {
      if (*mult_set && *mult != 0)
	return false;
      *mult_set = true;
      *mult = 0;
      return true;
    }

  if (div == 0)
    return false;

  if (!wi::multiple_of_p (val, div, SIGNED, &cst))
    return false;

  if (*mult_set && *mult != cst)
    return false;

  *mult_set = true;
  *mult = cst;
  return true;
}
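
/* E.g., val = 12 and div = 4 succeed with *mult = 3; a subsequent call
   with val = 8 and div = 4 against the same MULT then fails, since
   8 = 2 * 4 and 2 != 3.  */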

/* Returns true if VAL = X * DIV for some constant X.  If this is the case,
   X is stored to MULT.  */

bool
aff_combination_constant_multiple_p (aff_tree *val, aff_tree *div,
				     widest_int *mult)
{
  bool mult_set = false;
  unsigned i;

  if (val->n == 0 && val->offset == 0)
    {
      *mult = 0;
      return true;
    }
  if (val->n != div->n)
    return false;

  if (val->rest || div->rest)
    return false;

  if (!wide_int_constant_multiple_p (val->offset, div->offset,
				     &mult_set, mult))
    return false;

  for (i = 0; i < div->n; i++)
    {
      struct aff_comb_elt *elt
	= aff_combination_find_elt (val, div->elts[i].val, NULL);
      if (!elt)
	return false;
      if (!wide_int_constant_multiple_p (elt->coef, div->elts[i].coef,
					 &mult_set, mult))
	return false;
    }

  gcc_assert (mult_set);
  return true;
}
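
/* Sketch: for val = 8 + 4 * x and div = 2 + x, the function succeeds
   with *mult = 4, since the offsets and the coefficients of x are both
   in the ratio 4 : 1.  */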

/* Prints the affine VAL to the FILE.  */

static void
print_aff (FILE *file, aff_tree *val)
{
  unsigned i;
  signop sgn = TYPE_SIGN (val->type);
  if (POINTER_TYPE_P (val->type))
    sgn = SIGNED;
  fprintf (file, "{\n  type = ");
  print_generic_expr (file, val->type, TDF_VOPS|TDF_MEMSYMS);
  fprintf (file, "\n  offset = ");
  print_dec (val->offset, file, sgn);
  if (val->n > 0)
    {
      fprintf (file, "\n  elements = {\n");
      for (i = 0; i < val->n; i++)
	{
	  fprintf (file, "    [%d] = ", i);
	  print_generic_expr (file, val->elts[i].val, TDF_VOPS|TDF_MEMSYMS);

	  fprintf (file, " * ");
	  print_dec (val->elts[i].coef, file, sgn);
	  if (i != val->n - 1)
	    fprintf (file, ", \n");
	}
      fprintf (file, "\n  }");
    }
  if (val->rest)
    {
      fprintf (file, "\n  rest = ");
      print_generic_expr (file, val->rest, TDF_VOPS|TDF_MEMSYMS);
    }
  fprintf (file, "\n}");
}

/* Prints the affine VAL to the standard error, used for debugging.  */

DEBUG_FUNCTION void
debug_aff (aff_tree *val)
{
  print_aff (stderr, val);
  fprintf (stderr, "\n");
}

/* Computes address of the reference REF in ADDR.  The size of the accessed
   location is stored to SIZE.  Returns the ultimate containing object to
   which REF refers.  */

tree
get_inner_reference_aff (tree ref, aff_tree *addr, widest_int *size)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree toff;
  enum machine_mode mode;
  int uns, vol;
  aff_tree tmp;
  tree base = get_inner_reference (ref, &bitsize, &bitpos, &toff, &mode,
				   &uns, &vol, false);
  tree base_addr = build_fold_addr_expr (base);

  /* ADDR = &BASE + TOFF + BITPOS / BITS_PER_UNIT.  */

  tree_to_aff_combination (base_addr, sizetype, addr);

  if (toff)
    {
      tree_to_aff_combination (toff, sizetype, &tmp);
      aff_combination_add (addr, &tmp);
    }

  aff_combination_const (&tmp, sizetype, bitpos / BITS_PER_UNIT);
  aff_combination_add (addr, &tmp);

  *size = (bitsize + BITS_PER_UNIT - 1) / BITS_PER_UNIT;

  return base;
}
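
/* Illustration: for a COMPONENT_REF s.f, where field f starts at byte
   offset 4 within s, ADDR becomes the combination &s + 4 and *SIZE is
   the size of the access in bytes, rounded up from bits.  */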

/* Returns true if a region of size SIZE1 at position 0 and a region of
   size SIZE2 at position DIFF cannot overlap.  */

bool
aff_comb_cannot_overlap_p (aff_tree *diff, const widest_int &size1,
			   const widest_int &size2)
{
  /* Unless the difference is a constant, we fail.  */
  if (diff->n != 0)
    return false;

  if (wi::neg_p (diff->offset))
    {
      /* The second object is before the first one, we succeed if the last
	 element of the second object is before the start of the first one.  */
      return wi::neg_p (diff->offset + size2 - 1);
    }
  else
    {
      /* We succeed if the second object starts after the first one ends.  */
      return wi::les_p (size1, diff->offset);
    }
}
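
/* E.g., with diff->offset = 8, size1 = 8 and size2 = 4, the regions
   [0, 8) and [8, 12) cannot overlap, so the function returns true;
   with diff->offset = 4 it returns false, since [4, 8) intersects
   [0, 8).  */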