/* Operations with affine combinations of trees.
   Copyright (C) 2005-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "expr.h"
#include "tree-pretty-print.h"
#include "pointer-set.h"
#include "tree-affine.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimplify.h"
#include "flags.h"
#include "dumpfile.h"
#include "cfgexpand.h"
/* Extends CST as appropriate for the affine combinations COMB.  */

static double_int
double_int_ext_for_comb (double_int cst, aff_tree *comb)
{
  return cst.sext (TYPE_PRECISION (comb->type));
}
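/* For illustration (hypothetical values): with an 8-bit COMB->type, the
   constant 0xff sign-extends to -1, so offsets and coefficients stay
   canonical in the signed range of the combination's precision.  */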
/* Initializes affine combination COMB so that its value is zero in TYPE.  */

static void
aff_combination_zero (aff_tree *comb, tree type)
{
  comb->type = type;
  comb->offset = double_int_zero;
  comb->n = 0;
  comb->rest = NULL_TREE;
}
/* Sets COMB to CST.  */

void
aff_combination_const (aff_tree *comb, tree type, double_int cst)
{
  aff_combination_zero (comb, type);
  comb->offset = double_int_ext_for_comb (cst, comb);
}
/* Sets COMB to single element ELT.  */

void
aff_combination_elt (aff_tree *comb, tree type, tree elt)
{
  aff_combination_zero (comb, type);

  comb->n = 1;
  comb->elts[0].val = elt;
  comb->elts[0].coef = double_int_one;
}
/* Scales COMB by SCALE.  */

void
aff_combination_scale (aff_tree *comb, double_int scale)
{
  unsigned i, j;

  scale = double_int_ext_for_comb (scale, comb);
  if (scale.is_one ())
    return;

  if (scale.is_zero ())
    {
      aff_combination_zero (comb, comb->type);
      return;
    }

  comb->offset
    = double_int_ext_for_comb (scale * comb->offset, comb);
  for (i = 0, j = 0; i < comb->n; i++)
    {
      double_int new_coef;

      new_coef
        = double_int_ext_for_comb (scale * comb->elts[i].coef, comb);
      /* A coefficient may become zero due to overflow.  Remove the zero
         elements.  */
      if (new_coef.is_zero ())
        continue;
      comb->elts[j].coef = new_coef;
      comb->elts[j].val = comb->elts[i].val;
      j++;
    }
  comb->n = j;

  if (comb->rest)
    {
      tree type = comb->type;
      if (POINTER_TYPE_P (type))
        type = sizetype;
      if (comb->n < MAX_AFF_ELTS)
        {
          comb->elts[comb->n].coef = scale;
          comb->elts[comb->n].val = comb->rest;
          comb->rest = NULL_TREE;
          comb->n++;
        }
      else
        comb->rest = fold_build2 (MULT_EXPR, type, comb->rest,
                                  double_int_to_tree (type, scale));
    }
}
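/* Illustrative example (hypothetical combination): scaling
   { offset = 1, elts = { a * 2 } } by 3 yields
   { offset = 3, elts = { a * 6 } }.  A coefficient that wraps to zero
   in the precision of COMB->type drops its element instead.  */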
/* Adds ELT * SCALE to COMB.  */

void
aff_combination_add_elt (aff_tree *comb, tree elt, double_int scale)
{
  unsigned i;
  tree type;

  scale = double_int_ext_for_comb (scale, comb);
  if (scale.is_zero ())
    return;

  for (i = 0; i < comb->n; i++)
    if (operand_equal_p (comb->elts[i].val, elt, 0))
      {
        double_int new_coef;

        new_coef = comb->elts[i].coef + scale;
        new_coef = double_int_ext_for_comb (new_coef, comb);
        if (!new_coef.is_zero ())
          {
            comb->elts[i].coef = new_coef;
            return;
          }

        comb->n--;
        comb->elts[i] = comb->elts[comb->n];

        if (comb->rest)
          {
            gcc_assert (comb->n == MAX_AFF_ELTS - 1);
            comb->elts[comb->n].coef = double_int_one;
            comb->elts[comb->n].val = comb->rest;
            comb->rest = NULL_TREE;
            comb->n++;
          }
        return;
      }
  if (comb->n < MAX_AFF_ELTS)
    {
      comb->elts[comb->n].coef = scale;
      comb->elts[comb->n].val = elt;
      comb->n++;
      return;
    }

  type = comb->type;
  if (POINTER_TYPE_P (type))
    type = sizetype;

  if (scale.is_one ())
    elt = fold_convert (type, elt);
  else
    elt = fold_build2 (MULT_EXPR, type,
                       fold_convert (type, elt),
                       double_int_to_tree (type, scale));

  if (comb->rest)
    comb->rest = fold_build2 (PLUS_EXPR, type, comb->rest,
                              elt);
  else
    comb->rest = elt;
}
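/* Illustrative example (hypothetical combination): adding a * 3 to
   { offset = 0, elts = { a * -3 } } cancels the existing element,
   leaving { offset = 0 }.  Adding a new element when all MAX_AFF_ELTS
   slots are taken folds it into COMB->rest instead.  */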
/* Adds CST to C.  */

static void
aff_combination_add_cst (aff_tree *c, double_int cst)
{
  c->offset = double_int_ext_for_comb (c->offset + cst, c);
}
/* Adds COMB2 to COMB1.  */

void
aff_combination_add (aff_tree *comb1, aff_tree *comb2)
{
  unsigned i;

  aff_combination_add_cst (comb1, comb2->offset);
  for (i = 0; i < comb2->n; i++)
    aff_combination_add_elt (comb1, comb2->elts[i].val, comb2->elts[i].coef);
  if (comb2->rest)
    aff_combination_add_elt (comb1, comb2->rest, double_int_one);
}
/* Converts affine combination COMB to TYPE.  */

void
aff_combination_convert (aff_tree *comb, tree type)
{
  unsigned i, j;
  tree comb_type = comb->type;

  if (TYPE_PRECISION (type) > TYPE_PRECISION (comb_type))
    {
      tree val = fold_convert (type, aff_combination_to_tree (comb));
      tree_to_aff_combination (val, type, comb);
      return;
    }

  comb->type = type;
  if (comb->rest && !POINTER_TYPE_P (type))
    comb->rest = fold_convert (type, comb->rest);

  if (TYPE_PRECISION (type) == TYPE_PRECISION (comb_type))
    return;

  comb->offset = double_int_ext_for_comb (comb->offset, comb);
  for (i = j = 0; i < comb->n; i++)
    {
      double_int new_coef = double_int_ext_for_comb (comb->elts[i].coef, comb);
      if (new_coef.is_zero ())
        continue;
      comb->elts[j].coef = new_coef;
      comb->elts[j].val = fold_convert (type, comb->elts[i].val);
      j++;
    }

  comb->n = j;
  if (comb->n < MAX_AFF_ELTS && comb->rest)
    {
      comb->elts[comb->n].coef = double_int_one;
      comb->elts[comb->n].val = comb->rest;
      comb->rest = NULL_TREE;
      comb->n++;
    }
}
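/* A minimal usage sketch (EXPR and NARROW_TYPE are hypothetical):

     aff_tree comb;
     tree_to_aff_combination (expr, TREE_TYPE (expr), &comb);
     aff_combination_convert (&comb, narrow_type);

   Narrowing re-extends the offset and the coefficients for the new
   precision; widening instead rebuilds the combination from the
   converted tree, since narrower terms cannot simply be reinterpreted
   in the wider type.  */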
/* Splits EXPR into an affine combination of parts.  */

void
tree_to_aff_combination (tree expr, tree type, aff_tree *comb)
{
  aff_tree tmp;
  enum tree_code code;
  tree cst, core, toffset;
  HOST_WIDE_INT bitpos, bitsize;
  enum machine_mode mode;
  int unsignedp, volatilep;

  STRIP_NOPS (expr);

  code = TREE_CODE (expr);
  switch (code)
    {
    case INTEGER_CST:
      aff_combination_const (comb, type, tree_to_double_int (expr));
      return;

    case POINTER_PLUS_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
      aff_combination_add (comb, &tmp);
      return;

    case PLUS_EXPR:
    case MINUS_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), type, &tmp);
      if (code == MINUS_EXPR)
        aff_combination_scale (&tmp, double_int_minus_one);
      aff_combination_add (comb, &tmp);
      return;

    case MULT_EXPR:
      cst = TREE_OPERAND (expr, 1);
      if (TREE_CODE (cst) != INTEGER_CST)
        break;
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, tree_to_double_int (cst));
      return;

    case NEGATE_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, double_int_minus_one);
      return;

    case BIT_NOT_EXPR:
      /* ~x = -x - 1.  */
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, double_int_minus_one);
      aff_combination_add_cst (comb, double_int_minus_one);
      return;

    case ADDR_EXPR:
      /* Handle &MEM[ptr + CST] which is equivalent to POINTER_PLUS_EXPR.  */
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == MEM_REF)
        {
          expr = TREE_OPERAND (expr, 0);
          tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
          tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
          aff_combination_add (comb, &tmp);
          return;
        }
      core = get_inner_reference (TREE_OPERAND (expr, 0), &bitsize, &bitpos,
                                  &toffset, &mode, &unsignedp, &volatilep,
                                  false);
      if (bitpos % BITS_PER_UNIT != 0)
        break;
      aff_combination_const (comb, type,
                             double_int::from_uhwi (bitpos / BITS_PER_UNIT));
      core = build_fold_addr_expr (core);
      if (TREE_CODE (core) == ADDR_EXPR)
        aff_combination_add_elt (comb, core, double_int_one);
      else
        {
          tree_to_aff_combination (core, type, &tmp);
          aff_combination_add (comb, &tmp);
        }
      if (toffset)
        {
          tree_to_aff_combination (toffset, type, &tmp);
          aff_combination_add (comb, &tmp);
        }
      return;

    case MEM_REF:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR)
        tree_to_aff_combination (TREE_OPERAND (TREE_OPERAND (expr, 0), 0),
                                 type, comb);
      else if (integer_zerop (TREE_OPERAND (expr, 1)))
        {
          aff_combination_elt (comb, type, expr);
          return;
        }
      else
        aff_combination_elt (comb, type,
                             build2 (MEM_REF, TREE_TYPE (expr),
                                     TREE_OPERAND (expr, 0),
                                     build_int_cst
                                       (TREE_TYPE (TREE_OPERAND (expr, 1)), 0)));
      tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
      aff_combination_add (comb, &tmp);
      return;

    default:
      break;
    }

  aff_combination_elt (comb, type, expr);
}
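/* Illustrative decomposition (hypothetical expression): for
   "a + 4 * b - 5" in a signed TYPE, the resulting combination is
   conceptually { offset = -5, elts = { a * 1, b * 4 } }.  Any
   subexpression not matching one of the handled codes becomes a
   single element with coefficient one.  */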
/* Creates EXPR + ELT * SCALE in TYPE.  EXPR is taken from affine
   combination COMB.  */

static tree
add_elt_to_tree (tree expr, tree type, tree elt, double_int scale,
                 aff_tree *comb)
{
  enum tree_code code;
  tree type1 = type;
  if (POINTER_TYPE_P (type))
    type1 = sizetype;

  scale = double_int_ext_for_comb (scale, comb);

  if (scale.is_minus_one ()
      && POINTER_TYPE_P (TREE_TYPE (elt)))
    {
      elt = convert_to_ptrofftype (elt);
      elt = fold_build1 (NEGATE_EXPR, TREE_TYPE (elt), elt);
      scale = double_int_one;
    }

  if (scale.is_one ())
    {
      if (!expr)
        {
          if (POINTER_TYPE_P (TREE_TYPE (elt)))
            return elt;
          else
            return fold_convert (type1, elt);
        }

      if (POINTER_TYPE_P (TREE_TYPE (expr)))
        return fold_build_pointer_plus (expr, elt);
      if (POINTER_TYPE_P (TREE_TYPE (elt)))
        return fold_build_pointer_plus (elt, expr);
      return fold_build2 (PLUS_EXPR, type1,
                          expr, fold_convert (type1, elt));
    }

  if (scale.is_minus_one ())
    {
      if (!expr)
        return fold_build1 (NEGATE_EXPR, type1,
                            fold_convert (type1, elt));

      if (POINTER_TYPE_P (TREE_TYPE (expr)))
        {
          elt = convert_to_ptrofftype (elt);
          elt = fold_build1 (NEGATE_EXPR, TREE_TYPE (elt), elt);
          return fold_build_pointer_plus (expr, elt);
        }
      return fold_build2 (MINUS_EXPR, type1,
                          expr, fold_convert (type1, elt));
    }

  elt = fold_convert (type1, elt);
  if (!expr)
    return fold_build2 (MULT_EXPR, type1, elt,
                        double_int_to_tree (type1, scale));

  if (scale.is_negative ())
    {
      code = MINUS_EXPR;
      scale = -scale;
    }
  else
    code = PLUS_EXPR;

  elt = fold_build2 (MULT_EXPR, type1, elt,
                     double_int_to_tree (type1, scale));
  if (POINTER_TYPE_P (TREE_TYPE (expr)))
    {
      if (code == MINUS_EXPR)
        elt = fold_build1 (NEGATE_EXPR, type1, elt);
      return fold_build_pointer_plus (expr, elt);
    }
  return fold_build2 (code, type1, expr, elt);
}
/* Makes tree from the affine combination COMB.  */

tree
aff_combination_to_tree (aff_tree *comb)
{
  tree type = comb->type;
  tree expr = NULL_TREE;
  unsigned i;
  double_int off, sgn;
  tree type1 = type;
  if (POINTER_TYPE_P (type))
    type1 = sizetype;

  gcc_assert (comb->n == MAX_AFF_ELTS || comb->rest == NULL_TREE);

  for (i = 0; i < comb->n; i++)
    expr = add_elt_to_tree (expr, type, comb->elts[i].val, comb->elts[i].coef,
                            comb);

  if (comb->rest)
    expr = add_elt_to_tree (expr, type, comb->rest, double_int_one, comb);

  /* Ensure that we get x - 1, not x + (-1) or x + 0xff..f if x is
     unsigned.  */
  if (comb->offset.is_negative ())
    {
      off = -comb->offset;
      sgn = double_int_minus_one;
    }
  else
    {
      off = comb->offset;
      sgn = double_int_one;
    }
  return add_elt_to_tree (expr, type, double_int_to_tree (type1, off), sgn,
                          comb);
}
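/* Round-trip sketch (EXPR is hypothetical): the rebuilt tree is
   equivalent to the original expression, though not necessarily
   structurally identical, since the terms are re-associated:

     aff_tree comb;
     tree_to_aff_combination (expr, type, &comb);
     tree folded = aff_combination_to_tree (&comb);  */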
/* Copies the tree elements of COMB to ensure that they are not shared.  */

void
unshare_aff_combination (aff_tree *comb)
{
  unsigned i;

  for (i = 0; i < comb->n; i++)
    comb->elts[i].val = unshare_expr (comb->elts[i].val);
  if (comb->rest)
    comb->rest = unshare_expr (comb->rest);
}
/* Remove M-th element from COMB.  */

void
aff_combination_remove_elt (aff_tree *comb, unsigned m)
{
  comb->n--;
  if (m <= comb->n)
    comb->elts[m] = comb->elts[comb->n];
  if (comb->rest)
    {
      comb->elts[comb->n].coef = double_int_one;
      comb->elts[comb->n].val = comb->rest;
      comb->rest = NULL_TREE;
      comb->n++;
    }
}
/* Adds C * COEF * VAL to R.  VAL may be NULL, in that case only
   C * COEF is added to R.  */

static void
aff_combination_add_product (aff_tree *c, double_int coef, tree val,
                             aff_tree *r)
{
  unsigned i;
  tree aval, type;

  for (i = 0; i < c->n; i++)
    {
      aval = c->elts[i].val;
      if (val)
        {
          type = TREE_TYPE (aval);
          aval = fold_build2 (MULT_EXPR, type, aval,
                              fold_convert (type, val));
        }

      aff_combination_add_elt (r, aval, coef * c->elts[i].coef);
    }

  if (c->rest)
    {
      aval = c->rest;
      if (val)
        {
          type = TREE_TYPE (aval);
          aval = fold_build2 (MULT_EXPR, type, aval,
                              fold_convert (type, val));
        }

      aff_combination_add_elt (r, aval, coef);
    }

  if (val)
    aff_combination_add_elt (r, val, coef * c->offset);
  else
    aff_combination_add_cst (r, coef * c->offset);
}
/* Multiplies C1 by C2, storing the result to R.  */

void
aff_combination_mult (aff_tree *c1, aff_tree *c2, aff_tree *r)
{
  unsigned i;

  gcc_assert (TYPE_PRECISION (c1->type) == TYPE_PRECISION (c2->type));

  aff_combination_zero (r, c1->type);

  for (i = 0; i < c2->n; i++)
    aff_combination_add_product (c1, c2->elts[i].coef, c2->elts[i].val, r);
  if (c2->rest)
    aff_combination_add_product (c1, double_int_one, c2->rest, r);
  aff_combination_add_product (c1, c2->offset, NULL, r);
}
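/* Illustrative example (hypothetical combinations): multiplying
   { offset = 1, elts = { x * 1 } } by { offset = 2, elts = { y * 1 } }
   yields { offset = 2, elts = { x * 2, y * 1, (x * y) * 1 } }; the
   nonlinear product x * y is built as a tree and becomes a single new
   element.  */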
/* Returns the element of COMB whose value is VAL, or NULL if no such
   element exists.  If IDX is not NULL, it is set to the index of VAL in
   COMB.  */

static struct aff_comb_elt *
aff_combination_find_elt (aff_tree *comb, tree val, unsigned *idx)
{
  unsigned i;

  for (i = 0; i < comb->n; i++)
    if (operand_equal_p (comb->elts[i].val, val, 0))
      {
        if (idx)
          *idx = i;

        return &comb->elts[i];
      }

  return NULL;
}
/* Element of the cache that maps ssa name NAME to its expanded form
   as an affine expression EXPANSION.  */

struct name_expansion
{
  aff_tree expansion;

  /* True if the expansion for the name is just being generated.  */
  unsigned in_progress : 1;
};
/* Expands SSA names in COMB recursively.  CACHE is used to cache the
   results.  */

void
aff_combination_expand (aff_tree *comb ATTRIBUTE_UNUSED,
                        struct pointer_map_t **cache ATTRIBUTE_UNUSED)
{
  unsigned i;
  aff_tree to_add, current, curre;
  tree e, rhs;
  gimple def;
  double_int scale;
  void **slot;
  struct name_expansion *exp;

  aff_combination_zero (&to_add, comb->type);
  for (i = 0; i < comb->n; i++)
    {
      tree type, name;
      enum tree_code code;

      e = comb->elts[i].val;
      type = TREE_TYPE (e);
      name = e;
      /* Look through some conversions.  */
      if (TREE_CODE (e) == NOP_EXPR
          && (TYPE_PRECISION (type)
              >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (e, 0)))))
        name = TREE_OPERAND (e, 0);
      if (TREE_CODE (name) != SSA_NAME)
        continue;
      def = SSA_NAME_DEF_STMT (name);
      if (!is_gimple_assign (def) || gimple_assign_lhs (def) != name)
        continue;

      code = gimple_assign_rhs_code (def);
      if (code != SSA_NAME
          && !IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
          && (get_gimple_rhs_class (code) != GIMPLE_SINGLE_RHS
              || !is_gimple_min_invariant (gimple_assign_rhs1 (def))))
        continue;

      /* We do not know whether the reference retains its value at the
         place where the expansion is used.  */
      if (TREE_CODE_CLASS (code) == tcc_reference)
        continue;

      if (!*cache)
        *cache = pointer_map_create ();
      slot = pointer_map_insert (*cache, e);
      exp = (struct name_expansion *) *slot;

      if (!exp)
        {
          exp = XNEW (struct name_expansion);
          exp->in_progress = 1;
          *slot = exp;

          /* In principle this is a generally valid folding, but
             it is not unconditionally an optimization, so do it
             here and not in fold_unary.  */
          /* Convert (T1)(X *+- CST) into (T1)X *+- (T1)CST if T1 is wider
             than the type of X and overflow for the type of X is
             undefined.  */
          if (e != name
              && INTEGRAL_TYPE_P (type)
              && INTEGRAL_TYPE_P (TREE_TYPE (name))
              && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (name))
              && TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (name))
              && (code == PLUS_EXPR || code == MINUS_EXPR || code == MULT_EXPR)
              && TREE_CODE (gimple_assign_rhs2 (def)) == INTEGER_CST)
            rhs = fold_build2 (code, type,
                               fold_convert (type, gimple_assign_rhs1 (def)),
                               fold_convert (type, gimple_assign_rhs2 (def)));
          else
            {
              rhs = gimple_assign_rhs_to_tree (def);
              if (e != name)
                rhs = fold_convert (type, rhs);
            }
          tree_to_aff_combination_expand (rhs, comb->type, &current, cache);
          exp->expansion = current;
          exp->in_progress = 0;
        }
      else
        {
          /* Since we follow the definitions in the SSA form, we should not
             enter a cycle unless we pass through a phi node.  */
          gcc_assert (!exp->in_progress);
          current = exp->expansion;
        }

      /* Accumulate the new terms to TO_ADD, so that we do not modify
         COMB while traversing it; include the term -coef * E, to remove
         it from COMB.  */
      scale = comb->elts[i].coef;
      aff_combination_zero (&curre, comb->type);
      aff_combination_add_elt (&curre, e, -scale);
      aff_combination_scale (&current, scale);
      aff_combination_add (&to_add, &current);
      aff_combination_add (&to_add, &curre);
    }
  aff_combination_add (comb, &to_add);
}
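/* Illustrative expansion (hypothetical SSA names): if COMB contains
   x_1 * 2 and x_1 is defined by "x_1 = a_2 + 3", the expansion adds
   a_2 * 2 and offset 6 to TO_ADD, together with x_1 * -2, which is
   what removes the original element from COMB in the final add.  */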
/* Similar to tree_to_aff_combination, but follows SSA name definitions
   and expands them recursively.  CACHE is used to cache the expansions
   of the ssa names, to avoid exponential time complexity for cases
   like

   x1 = a + b;
   x2 = x1 + x1;
   x3 = x2 + x2;
   ...  */

void
tree_to_aff_combination_expand (tree expr, tree type, aff_tree *comb,
                                struct pointer_map_t **cache)
{
  tree_to_aff_combination (expr, type, comb);
  aff_combination_expand (comb, cache);
}
/* Frees memory occupied by struct name_expansion in *VALUE.  Callback for
   pointer_map_traverse.  */

static bool
free_name_expansion (const void *key ATTRIBUTE_UNUSED, void **value,
                     void *data ATTRIBUTE_UNUSED)
{
  struct name_expansion *const exp = (struct name_expansion *) *value;

  free (exp);
  return true;
}
/* Frees memory allocated for the CACHE used by
   tree_to_aff_combination_expand.  */

void
free_affine_expand_cache (struct pointer_map_t **cache)
{
  if (!*cache)
    return;

  pointer_map_traverse (*cache, free_name_expansion, NULL);
  pointer_map_destroy (*cache);
  *cache = NULL;
}
/* If VAL != CST * DIV for any constant CST, returns false.
   Otherwise, if *MULT_SET is true, additionally compares CST and MULT,
   and if they are different, returns false.  Finally, if neither of these
   two cases occur, true is returned, and CST is stored to MULT and MULT_SET
   is set to true.  */

static bool
double_int_constant_multiple_p (double_int val, double_int div,
                                bool *mult_set, double_int *mult)
{
  double_int rem, cst;

  if (val.is_zero ())
    {
      if (*mult_set && !mult->is_zero ())
        return false;
      *mult_set = true;
      *mult = double_int_zero;
      return true;
    }

  if (div.is_zero ())
    return false;

  cst = val.sdivmod (div, FLOOR_DIV_EXPR, &rem);
  if (!rem.is_zero ())
    return false;

  if (*mult_set && *mult != cst)
    return false;

  *mult_set = true;
  *mult = cst;
  return true;
}
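/* Worked example (hypothetical values): val = 6, div = 3 stores
   *MULT = 2; a later call with val = 4, div = 3 fails on the nonzero
   remainder, and one with val = 9, div = 3 fails because the quotient
   3 disagrees with the already set *MULT.  */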
/* Returns true if VAL = X * DIV for some constant X.  If this is the case,
   X is stored to MULT.  */

bool
aff_combination_constant_multiple_p (aff_tree *val, aff_tree *div,
                                     double_int *mult)
{
  bool mult_set = false;
  unsigned i;

  if (val->n == 0 && val->offset.is_zero ())
    {
      *mult = double_int_zero;
      return true;
    }

  if (val->n != div->n)
    return false;

  if (val->rest || div->rest)
    return false;

  if (!double_int_constant_multiple_p (val->offset, div->offset,
                                       &mult_set, mult))
    return false;

  for (i = 0; i < div->n; i++)
    {
      struct aff_comb_elt *elt
        = aff_combination_find_elt (val, div->elts[i].val, NULL);
      if (!elt)
        return false;
      if (!double_int_constant_multiple_p (elt->coef, div->elts[i].coef,
                                           &mult_set, mult))
        return false;
    }

  gcc_assert (mult_set);
  return true;
}
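/* Illustrative example (hypothetical combinations): for
   VAL = { offset = 2, elts = { a * 6 } } and
   DIV = { offset = 1, elts = { a * 3 } }, each coefficient of VAL is
   exactly twice the corresponding one of DIV, so the function returns
   true with *MULT = 2.  */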
/* Prints the affine VAL to the FILE.  */

static void
print_aff (FILE *file, aff_tree *val)
{
  unsigned i;
  bool uns = TYPE_UNSIGNED (val->type);
  if (POINTER_TYPE_P (val->type))
    uns = false;
  fprintf (file, "{\n  type = ");
  print_generic_expr (file, val->type, TDF_VOPS|TDF_MEMSYMS);
  fprintf (file, "\n  offset = ");
  dump_double_int (file, val->offset, uns);
  if (val->n > 0)
    {
      fprintf (file, "\n  elements = {\n");
      for (i = 0; i < val->n; i++)
        {
          fprintf (file, "    [%d] = ", i);
          print_generic_expr (file, val->elts[i].val, TDF_VOPS|TDF_MEMSYMS);

          fprintf (file, " * ");
          dump_double_int (file, val->elts[i].coef, uns);
          if (i != val->n - 1)
            fprintf (file, ", \n");
        }
      fprintf (file, "\n  }");
    }
  if (val->rest)
    {
      fprintf (file, "\n  rest = ");
      print_generic_expr (file, val->rest, TDF_VOPS|TDF_MEMSYMS);
    }
  fprintf (file, "\n}");
}
/* Prints the affine VAL to the standard error, used for debugging.  */

DEBUG_FUNCTION void
debug_aff (aff_tree *val)
{
  print_aff (stderr, val);
  fprintf (stderr, "\n");
}
/* Computes address of the reference REF in ADDR.  The size of the accessed
   location is stored to SIZE.  Returns the ultimate containing object to
   which REF refers.  */

tree
get_inner_reference_aff (tree ref, aff_tree *addr, double_int *size)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree toff;
  enum machine_mode mode;
  int uns, vol;
  aff_tree tmp;
  tree base = get_inner_reference (ref, &bitsize, &bitpos, &toff, &mode,
                                   &uns, &vol, false);
  tree base_addr = build_fold_addr_expr (base);

  /* ADDR = &BASE + TOFF + BITPOS / BITS_PER_UNIT.  */

  tree_to_aff_combination (base_addr, sizetype, addr);

  if (toff)
    {
      tree_to_aff_combination (toff, sizetype, &tmp);
      aff_combination_add (addr, &tmp);
    }

  aff_combination_const (&tmp, sizetype,
                         double_int::from_shwi (bitpos / BITS_PER_UNIT));
  aff_combination_add (addr, &tmp);

  *size = double_int::from_shwi ((bitsize + BITS_PER_UNIT - 1) / BITS_PER_UNIT);

  return base;
}
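/* Usage sketch (REF is a hypothetical memory reference):

     aff_tree addr;
     double_int size;
     tree base = get_inner_reference_aff (ref, &addr, &size);

   ADDR then describes the address of REF as &BASE plus constant and
   variable byte offsets, and SIZE the accessed size rounded up to
   whole bytes.  */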
/* Returns true if a region of size SIZE1 at position 0 and a region of
   size SIZE2 at position DIFF cannot overlap.  */

bool
aff_comb_cannot_overlap_p (aff_tree *diff, double_int size1, double_int size2)
{
  double_int d, bound;

  /* Unless the difference is a constant, we fail.  */
  if (diff->n != 0)
    return false;

  d = diff->offset;
  if (d.is_negative ())
    {
      /* The second object is before the first one, we succeed if the last
         element of the second object is before the start of the first one.  */
      bound = d + size2 + double_int_minus_one;
      return bound.is_negative ();
    }
  else
    {
      /* We succeed if the second object starts after the first one ends.  */
      return size1.sle (d);
    }
}
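/* Worked example (hypothetical constants): with DIFF = { offset = 8 },
   SIZE1 = 8 and SIZE2 = 4, the regions are [0, 8) and [8, 12), and
   SIZE1 <= DIFF holds, so they cannot overlap; with DIFF = { offset = 4 }
   the regions would share bytes [4, 8) and the function returns false.  */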