/* Operations with affine combinations of trees.
   Copyright (C) 2005-2019 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "ssa.h"
#include "tree-pretty-print.h"
#include "fold-const.h"
#include "tree-affine.h"
#include "gimplify.h"
#include "dumpfile.h"
#include "cfgexpand.h"

/* Extends CST as appropriate for the affine combination COMB.  */

static widest_int
wide_int_ext_for_comb (const widest_int &cst, tree type)
{
  return wi::sext (cst, TYPE_PRECISION (type));
}

/* Likewise for polynomial offsets.  */

static poly_widest_int
wide_int_ext_for_comb (const poly_widest_int &cst, tree type)
{
  return wi::sext (cst, TYPE_PRECISION (type));
}

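/* For instance, with a 32-bit TYPE, wide_int_ext_for_comb canonicalizes
   the widest_int value 0xffffffff to -1: offsets and coefficients are
   kept sign-extended to the precision of the combination's type, so
   wrap-around arithmetic stays explicit.  */
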
/* Initializes affine combination COMB so that its value is zero in TYPE.  */

static void
aff_combination_zero (aff_tree *comb, tree type)
{
  int i;
  comb->type = type;
  comb->offset = 0;
  comb->n = 0;
  for (i = 0; i < MAX_AFF_ELTS; i++)
    comb->elts[i].coef = 0;
  comb->rest = NULL_TREE;
}

/* Sets COMB to CST.  */

void
aff_combination_const (aff_tree *comb, tree type, const poly_widest_int &cst)
{
  aff_combination_zero (comb, type);
  comb->offset = wide_int_ext_for_comb (cst, comb->type);
}

/* Sets COMB to single element ELT.  */

void
aff_combination_elt (aff_tree *comb, tree type, tree elt)
{
  aff_combination_zero (comb, type);

  comb->n = 1;
  comb->elts[0].val = elt;
  comb->elts[0].coef = 1;
}

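/* For instance, aff_combination_elt (&comb, type, x) leaves COMB
   denoting 0 + 1 * x: comb.offset == 0, comb.n == 1,
   comb.elts[0] == { x, 1 } and comb.rest == NULL_TREE.  */
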
/* Scales COMB by SCALE.  */

void
aff_combination_scale (aff_tree *comb, const widest_int &scale_in)
{
  unsigned i, j;

  widest_int scale = wide_int_ext_for_comb (scale_in, comb->type);
  if (scale == 1)
    return;

  if (scale == 0)
    {
      aff_combination_zero (comb, comb->type);
      return;
    }

  comb->offset = wide_int_ext_for_comb (scale * comb->offset, comb->type);
  for (i = 0, j = 0; i < comb->n; i++)
    {
      widest_int new_coef
	= wide_int_ext_for_comb (scale * comb->elts[i].coef, comb->type);
      /* A coefficient may become zero due to overflow.  Remove the zero
	 elements.  */
      if (new_coef == 0)
	continue;
      comb->elts[j].coef = new_coef;
      comb->elts[j].val = comb->elts[i].val;
      j++;
    }
  comb->n = j;

  if (comb->rest)
    {
      tree type = comb->type;
      if (POINTER_TYPE_P (type))
	type = sizetype;
      if (comb->n < MAX_AFF_ELTS)
	{
	  comb->elts[comb->n].coef = scale;
	  comb->elts[comb->n].val = comb->rest;
	  comb->rest = NULL_TREE;
	  comb->n++;
	}
      else
	comb->rest = fold_build2 (MULT_EXPR, type, comb->rest,
				  wide_int_to_tree (type, scale));
    }
}

/* Adds ELT * SCALE to COMB.  */

void
aff_combination_add_elt (aff_tree *comb, tree elt, const widest_int &scale_in)
{
  unsigned i;
  tree type;

  widest_int scale = wide_int_ext_for_comb (scale_in, comb->type);
  if (scale == 0)
    return;

  for (i = 0; i < comb->n; i++)
    if (operand_equal_p (comb->elts[i].val, elt, 0))
      {
	widest_int new_coef
	  = wide_int_ext_for_comb (comb->elts[i].coef + scale, comb->type);
	if (new_coef != 0)
	  {
	    comb->elts[i].coef = new_coef;
	    return;
	  }

	comb->n--;
	comb->elts[i] = comb->elts[comb->n];

	if (comb->rest)
	  {
	    gcc_assert (comb->n == MAX_AFF_ELTS - 1);
	    comb->elts[comb->n].coef = 1;
	    comb->elts[comb->n].val = comb->rest;
	    comb->rest = NULL_TREE;
	    comb->n++;
	  }
	return;
      }
  if (comb->n < MAX_AFF_ELTS)
    {
      comb->elts[comb->n].coef = scale;
      comb->elts[comb->n].val = elt;
      comb->n++;
      return;
    }

  type = comb->type;
  if (POINTER_TYPE_P (type))
    type = sizetype;

  if (scale == 1)
    elt = fold_convert (type, elt);
  else
    elt = fold_build2 (MULT_EXPR, type,
		       fold_convert (type, elt),
		       wide_int_to_tree (type, scale));

  if (comb->rest)
    comb->rest = fold_build2 (PLUS_EXPR, type, comb->rest, elt);
  else
    comb->rest = elt;
}

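/* Illustrative example: adding x * 5 to a combination that already
   contains the element x * 3 merges the two into x * 8; if the merged
   coefficient sign-extends to zero, the element is removed instead.
   A new element is stored in ELTS while there is room and spills into
   the REST tree otherwise.  */
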
/* Adds CST to C.  */

static void
aff_combination_add_cst (aff_tree *c, const poly_widest_int &cst)
{
  c->offset = wide_int_ext_for_comb (c->offset + cst, c->type);
}

/* Adds COMB2 to COMB1.  */

void
aff_combination_add (aff_tree *comb1, aff_tree *comb2)
{
  unsigned i;

  aff_combination_add_cst (comb1, comb2->offset);
  for (i = 0; i < comb2->n; i++)
    aff_combination_add_elt (comb1, comb2->elts[i].val, comb2->elts[i].coef);
  if (comb2->rest)
    aff_combination_add_elt (comb1, comb2->rest, 1);
}

/* Converts affine combination COMB to TYPE.  */

void
aff_combination_convert (aff_tree *comb, tree type)
{
  unsigned i, j;
  tree comb_type = comb->type;

  if (TYPE_PRECISION (type) > TYPE_PRECISION (comb_type))
    {
      tree val = fold_convert (type, aff_combination_to_tree (comb));
      tree_to_aff_combination (val, type, comb);
      return;
    }

  comb->type = type;
  if (comb->rest && !POINTER_TYPE_P (type))
    comb->rest = fold_convert (type, comb->rest);

  if (TYPE_PRECISION (type) == TYPE_PRECISION (comb_type))
    return;

  comb->offset = wide_int_ext_for_comb (comb->offset, comb->type);
  for (i = j = 0; i < comb->n; i++)
    {
      if (comb->elts[i].coef == 0)
	continue;
      comb->elts[j].coef = comb->elts[i].coef;
      comb->elts[j].val = fold_convert (type, comb->elts[i].val);
      j++;
    }

  comb->n = j;
  if (comb->n < MAX_AFF_ELTS && comb->rest)
    {
      comb->elts[comb->n].coef = 1;
      comb->elts[comb->n].val = comb->rest;
      comb->rest = NULL_TREE;
      comb->n++;
    }
}

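/* Illustrative example: narrowing { offset = 0x100000000, x * 1 } from
   a 64-bit type to a 32-bit one sign-extends the offset to 0, matching
   how the truncation would fold on trees.  Widening instead takes the
   aff_combination_to_tree/tree_to_aff_combination round trip above,
   since the coefficients cannot simply be extended term by term.  */
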
/* Splits EXPR into an affine combination of parts.  */

void
tree_to_aff_combination (tree expr, tree type, aff_tree *comb)
{
  aff_tree tmp;
  enum tree_code code;
  tree cst, core, toffset;
  poly_int64 bitpos, bitsize, bytepos;
  machine_mode mode;
  int unsignedp, reversep, volatilep;

  STRIP_NOPS (expr);

  code = TREE_CODE (expr);
  switch (code)
    {
    case POINTER_PLUS_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
      aff_combination_add (comb, &tmp);
      return;

    case PLUS_EXPR:
    case MINUS_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), type, &tmp);
      if (code == MINUS_EXPR)
	aff_combination_scale (&tmp, -1);
      aff_combination_add (comb, &tmp);
      return;

    case MULT_EXPR:
      cst = TREE_OPERAND (expr, 1);
      if (TREE_CODE (cst) != INTEGER_CST)
	break;
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, wi::to_widest (cst));
      return;

    case NEGATE_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, -1);
      return;

    case BIT_NOT_EXPR:
      /* ~x = -x - 1 */
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, -1);
      aff_combination_add_cst (comb, -1);
      return;

    case ADDR_EXPR:
      /* Handle &MEM[ptr + CST] which is equivalent to POINTER_PLUS_EXPR.  */
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == MEM_REF)
	{
	  expr = TREE_OPERAND (expr, 0);
	  tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
	  tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
	  aff_combination_add (comb, &tmp);
	  return;
	}
      core = get_inner_reference (TREE_OPERAND (expr, 0), &bitsize, &bitpos,
				  &toffset, &mode, &unsignedp, &reversep,
				  &volatilep);
      if (!multiple_p (bitpos, BITS_PER_UNIT, &bytepos))
	break;
      aff_combination_const (comb, type, bytepos);
      if (TREE_CODE (core) == MEM_REF)
	{
	  tree mem_offset = TREE_OPERAND (core, 1);
	  aff_combination_add_cst (comb, wi::to_poly_widest (mem_offset));
	  core = TREE_OPERAND (core, 0);
	}
      else
	core = build_fold_addr_expr (core);

      if (TREE_CODE (core) == ADDR_EXPR)
	aff_combination_add_elt (comb, core, 1);
      else
	{
	  tree_to_aff_combination (core, type, &tmp);
	  aff_combination_add (comb, &tmp);
	}
      if (toffset)
	{
	  tree_to_aff_combination (toffset, type, &tmp);
	  aff_combination_add (comb, &tmp);
	}
      return;

    CASE_CONVERT:
      {
	tree otype = TREE_TYPE (expr);
	tree inner = TREE_OPERAND (expr, 0);
	tree itype = TREE_TYPE (inner);
	enum tree_code icode = TREE_CODE (inner);

	/* In principle this is a valid folding, but it isn't necessarily
	   an optimization, so do it here and not in fold_unary.  */
	if ((icode == PLUS_EXPR || icode == MINUS_EXPR || icode == MULT_EXPR)
	    && TREE_CODE (itype) == INTEGER_TYPE
	    && TREE_CODE (otype) == INTEGER_TYPE
	    && TYPE_PRECISION (otype) > TYPE_PRECISION (itype))
	  {
	    tree op0 = TREE_OPERAND (inner, 0), op1 = TREE_OPERAND (inner, 1);

	    /* If inner type has undefined overflow behavior, fold conversion
	       for below two cases:
		 (T1)(X *+- CST) -> (T1)X *+- (T1)CST
		 (T1)(X + X)     -> (T1)X + (T1)X.  */
	    if (TYPE_OVERFLOW_UNDEFINED (itype)
		&& (TREE_CODE (op1) == INTEGER_CST
		    || (icode == PLUS_EXPR && operand_equal_p (op0, op1, 0))))
	      {
		op0 = fold_convert (otype, op0);
		op1 = fold_convert (otype, op1);
		expr = fold_build2 (icode, otype, op0, op1);
		tree_to_aff_combination (expr, type, comb);
		return;
	      }
	    wide_int minv, maxv;
	    /* If inner type has wrapping overflow behavior, fold conversion
	       for below case:
		 (T1)(X - CST) -> (T1)X - (T1)CST
	       if X - CST doesn't overflow by range information.  Also handle
	       (T1)(X + CST) as (T1)(X - (-CST)).  */
	    if (TYPE_UNSIGNED (itype)
		&& TYPE_OVERFLOW_WRAPS (itype)
		&& TREE_CODE (op0) == SSA_NAME
		&& TREE_CODE (op1) == INTEGER_CST
		&& icode != MULT_EXPR
		&& get_range_info (op0, &minv, &maxv) == VR_RANGE)
	      {
		if (icode == PLUS_EXPR)
		  op1 = wide_int_to_tree (itype, -wi::to_wide (op1));
		if (wi::geu_p (minv, wi::to_wide (op1)))
		  {
		    op0 = fold_convert (otype, op0);
		    op1 = fold_convert (otype, op1);
		    expr = fold_build2 (MINUS_EXPR, otype, op0, op1);
		    tree_to_aff_combination (expr, type, comb);
		    return;
		  }
	      }
	  }
      }
      break;

    default:
      {
	if (poly_int_tree_p (expr))
	  {
	    aff_combination_const (comb, type, wi::to_poly_widest (expr));
	    return;
	  }
	break;
      }
    }

  aff_combination_elt (comb, type, expr);
}

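/* Illustrative example (hypothetical trees): for the address &a[i] of
   an array of 4-byte elements, the decomposition produces roughly

     comb->offset  = 0
     comb->elts[0] = { &a, 1 }
     comb->elts[1] = { (sizetype) i, 4 }

   i.e. the affine form &a + 4 * i, which callers can then compare and
   combine term by term.  */
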
/* Creates EXPR + ELT * SCALE in TYPE.  EXPR is taken from affine
   combination COMB.  */

static tree
add_elt_to_tree (tree expr, tree type, tree elt, const widest_int &scale_in)
{
  enum tree_code code;

  widest_int scale = wide_int_ext_for_comb (scale_in, type);

  elt = fold_convert (type, elt);
  if (scale == 1)
    {
      if (!expr)
	return elt;

      return fold_build2 (PLUS_EXPR, type, expr, elt);
    }

  if (scale == -1)
    {
      if (!expr)
	return fold_build1 (NEGATE_EXPR, type, elt);

      return fold_build2 (MINUS_EXPR, type, expr, elt);
    }

  if (!expr)
    return fold_build2 (MULT_EXPR, type, elt, wide_int_to_tree (type, scale));

  if (wi::neg_p (scale))
    {
      code = MINUS_EXPR;
      scale = -scale;
    }
  else
    code = PLUS_EXPR;

  elt = fold_build2 (MULT_EXPR, type, elt, wide_int_to_tree (type, scale));
  return fold_build2 (code, type, expr, elt);
}

/* Makes tree from the affine combination COMB.  */

tree
aff_combination_to_tree (aff_tree *comb)
{
  tree type = comb->type, base = NULL_TREE, expr = NULL_TREE;
  unsigned i;
  poly_widest_int off;
  int sgn;

  gcc_assert (comb->n == MAX_AFF_ELTS || comb->rest == NULL_TREE);

  i = 0;
  if (POINTER_TYPE_P (type))
    {
      type = sizetype;
      if (comb->n > 0 && comb->elts[0].coef == 1
	  && POINTER_TYPE_P (TREE_TYPE (comb->elts[0].val)))
	{
	  base = comb->elts[0].val;
	  ++i;
	}
    }

  for (; i < comb->n; i++)
    expr = add_elt_to_tree (expr, type, comb->elts[i].val, comb->elts[i].coef);

  if (comb->rest)
    expr = add_elt_to_tree (expr, type, comb->rest, 1);

  /* Ensure that we get x - 1, not x + (-1) or x + 0xff..f if x is
     unsigned.  */
  if (known_lt (comb->offset, 0))
    {
      off = -comb->offset;
      sgn = -1;
    }
  else
    {
      off = comb->offset;
      sgn = 1;
    }
  expr = add_elt_to_tree (expr, type, wide_int_to_tree (type, off), sgn);

  if (base)
    return fold_build_pointer_plus (base, expr);
  else
    return fold_convert (comb->type, expr);
}

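/* Illustrative example: { offset = -1, x * 1 } converts back to the
   tree x - 1 rather than x + 0xff..f, thanks to the sign handling
   above; for pointer-typed combinations the pointer base is instead
   re-attached with fold_build_pointer_plus.  */
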
/* Copies the tree elements of COMB to ensure that they are not shared.  */

void
unshare_aff_combination (aff_tree *comb)
{
  unsigned i;

  for (i = 0; i < comb->n; i++)
    comb->elts[i].val = unshare_expr (comb->elts[i].val);
  if (comb->rest)
    comb->rest = unshare_expr (comb->rest);
}

/* Remove M-th element from COMB.  */

void
aff_combination_remove_elt (aff_tree *comb, unsigned m)
{
  comb->n--;
  if (m <= comb->n)
    comb->elts[m] = comb->elts[comb->n];
  if (comb->rest)
    {
      comb->elts[comb->n].coef = 1;
      comb->elts[comb->n].val = comb->rest;
      comb->rest = NULL_TREE;
      comb->n++;
    }
}

/* Adds C * COEF * VAL to R.  VAL may be NULL, in that case only
   C * COEF is added to R.  */

static void
aff_combination_add_product (aff_tree *c, const widest_int &coef, tree val,
			     aff_tree *r)
{
  unsigned i;
  tree aval, type;

  for (i = 0; i < c->n; i++)
    {
      aval = c->elts[i].val;
      if (val)
	{
	  type = TREE_TYPE (aval);
	  aval = fold_build2 (MULT_EXPR, type, aval,
			      fold_convert (type, val));
	}

      aff_combination_add_elt (r, aval, coef * c->elts[i].coef);
    }

  if (c->rest)
    {
      aval = c->rest;
      if (val)
	{
	  type = TREE_TYPE (aval);
	  aval = fold_build2 (MULT_EXPR, type, aval,
			      fold_convert (type, val));
	}

      aff_combination_add_elt (r, aval, coef);
    }

  if (val)
    {
      if (c->offset.is_constant ())
	/* Access coeffs[0] directly, for efficiency.  */
	aff_combination_add_elt (r, val, coef * c->offset.coeffs[0]);
      else
	{
	  /* c->offset is polynomial, so multiply VAL rather than COEF
	     by it.  */
	  tree offset = wide_int_to_tree (TREE_TYPE (val), c->offset);
	  val = fold_build2 (MULT_EXPR, TREE_TYPE (val), val, offset);
	  aff_combination_add_elt (r, val, coef);
	}
    }
  else
    aff_combination_add_cst (r, coef * c->offset);
}

/* Multiplies C1 by C2, storing the result to R.  */

void
aff_combination_mult (aff_tree *c1, aff_tree *c2, aff_tree *r)
{
  unsigned i;
  gcc_assert (TYPE_PRECISION (c1->type) == TYPE_PRECISION (c2->type));

  aff_combination_zero (r, c1->type);

  for (i = 0; i < c2->n; i++)
    aff_combination_add_product (c1, c2->elts[i].coef, c2->elts[i].val, r);
  if (c2->rest)
    aff_combination_add_product (c1, 1, c2->rest, r);
  if (c2->offset.is_constant ())
    /* Access coeffs[0] directly, for efficiency.  */
    aff_combination_add_product (c1, c2->offset.coeffs[0], NULL, r);
  else
    {
      /* c2->offset is polynomial, so do the multiplication in tree form.  */
      tree offset = wide_int_to_tree (c2->type, c2->offset);
      aff_combination_add_product (c1, 1, offset, r);
    }
}

/* Returns the element of COMB whose value is VAL, or NULL if no such
   element exists.  If IDX is not NULL, it is set to the index of VAL in
   COMB.  */

static struct aff_comb_elt *
aff_combination_find_elt (aff_tree *comb, tree val, unsigned *idx)
{
  unsigned i;

  for (i = 0; i < comb->n; i++)
    if (operand_equal_p (comb->elts[i].val, val, 0))
      {
	if (idx)
	  *idx = i;

	return &comb->elts[i];
      }

  return NULL;
}

/* Element of the cache that maps ssa name NAME to its expanded form
   as an affine expression EXPANSION.  */

struct name_expansion
{
  aff_tree expansion;

  /* True if the expansion for the name is just being generated.  */
  unsigned in_progress : 1;
};

/* Expands SSA names in COMB recursively.  CACHE is used to cache the
   results.  */

void
aff_combination_expand (aff_tree *comb ATTRIBUTE_UNUSED,
			hash_map<tree, name_expansion *> **cache)
{
  unsigned i;
  aff_tree to_add, current, curre;
  tree e, rhs;
  gimple *def;
  widest_int scale;
  struct name_expansion *exp;

  aff_combination_zero (&to_add, comb->type);
  for (i = 0; i < comb->n; i++)
    {
      tree type, name;
      enum tree_code code;

      e = comb->elts[i].val;
      type = TREE_TYPE (e);
      name = e;
      /* Look through some conversions.  */
      if (CONVERT_EXPR_P (e)
	  && (TYPE_PRECISION (type)
	      >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (e, 0)))))
	name = TREE_OPERAND (e, 0);
      if (TREE_CODE (name) != SSA_NAME)
	continue;
      def = SSA_NAME_DEF_STMT (name);
      if (!is_gimple_assign (def) || gimple_assign_lhs (def) != name)
	continue;

      code = gimple_assign_rhs_code (def);
      if (code != SSA_NAME
	  && !IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
	  && (get_gimple_rhs_class (code) != GIMPLE_SINGLE_RHS
	      || !is_gimple_min_invariant (gimple_assign_rhs1 (def))))
	continue;

      /* We do not know whether the reference retains its value at the
	 place where the expansion is used.  */
      if (TREE_CODE_CLASS (code) == tcc_reference)
	continue;

      name_expansion **slot = NULL;
      if (*cache)
	slot = (*cache)->get (name);
      exp = slot ? *slot : NULL;
      if (!exp)
	{
	  /* Only bother to handle cases tree_to_aff_combination will.  */
	  switch (code)
	    {
	    case POINTER_PLUS_EXPR:
	    case PLUS_EXPR:
	    case MINUS_EXPR:
	    case MULT_EXPR:
	    case NEGATE_EXPR:
	    case BIT_NOT_EXPR:
	    CASE_CONVERT:
	      rhs = gimple_assign_rhs_to_tree (def);
	      break;
	    case ADDR_EXPR:
	    case INTEGER_CST:
	    case POLY_INT_CST:
	      rhs = gimple_assign_rhs1 (def);
	      break;
	    default:
	      continue;
	    }
	  tree_to_aff_combination (rhs, TREE_TYPE (name), &current);
	  exp = XNEW (struct name_expansion);
	  exp->in_progress = 1;
	  if (!*cache)
	    *cache = new hash_map<tree, name_expansion *>;
	  (*cache)->put (name, exp);
	  aff_combination_expand (&current, cache);
	  exp->expansion = current;
	  exp->in_progress = 0;
	}
      else
	{
	  /* Since we follow the definitions in the SSA form, we should not
	     enter a cycle unless we pass through a phi node.  */
	  gcc_assert (!exp->in_progress);
	  current = exp->expansion;
	}
      if (!useless_type_conversion_p (comb->type, current.type))
	aff_combination_convert (&current, comb->type);

      /* Accumulate the new terms to TO_ADD, so that we do not modify
	 COMB while traversing it; include the term -coef * E, to remove
	 it from COMB.  */
      scale = comb->elts[i].coef;
      aff_combination_zero (&curre, comb->type);
      aff_combination_add_elt (&curre, e, -scale);
      aff_combination_scale (&current, scale);
      aff_combination_add (&to_add, &current);
      aff_combination_add (&to_add, &curre);
    }
  aff_combination_add (comb, &to_add);
}

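/* Illustrative example (hypothetical SSA names): given

     b_2 = a_1 + a_1;
     c_3 = b_2 + b_2;

   expanding a combination containing c_3 * 1 replaces that element by
   4 * a_1, and the CACHE guarantees b_2 is decomposed only once no
   matter how often it is referenced.  */
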
/* Similar to tree_to_aff_combination, but follows SSA name definitions
   and expands them recursively.  CACHE is used to cache the expansions
   of the ssa names, to avoid exponential time complexity for cases
   like

   D.1 = a + a;
   D.2 = D.1 + D.1;
   D.3 = D.2 + D.2;
   ...  */

void
tree_to_aff_combination_expand (tree expr, tree type, aff_tree *comb,
				hash_map<tree, name_expansion *> **cache)
{
  tree_to_aff_combination (expr, type, comb);
  aff_combination_expand (comb, cache);
}

/* Frees memory occupied by struct name_expansion in *VALUE.  Callback for
   hash_map::traverse.  */

bool
free_name_expansion (tree const &, name_expansion **value, void *)
{
  free (*value);
  return true;
}

/* Frees memory allocated for the CACHE used by
   tree_to_aff_combination_expand.  */

void
free_affine_expand_cache (hash_map<tree, name_expansion *> **cache)
{
  if (!*cache)
    return;

  (*cache)->traverse<void *, free_name_expansion> (NULL);
  delete *cache;
  *cache = NULL;
}

/* If VAL != CST * DIV for any constant CST, returns false.
   Otherwise, if *MULT_SET is true, additionally compares CST and MULT,
   and if they are different, returns false.  Finally, if neither of these
   two cases occur, true is returned, and CST is stored to MULT and MULT_SET
   is set to true.  */

static bool
wide_int_constant_multiple_p (const poly_widest_int &val,
			      const poly_widest_int &div,
			      bool *mult_set, poly_widest_int *mult)
{
  poly_widest_int rem, cst;

  if (known_eq (val, 0))
    {
      if (*mult_set && maybe_ne (*mult, 0))
	return false;
      *mult_set = true;
      *mult = 0;
      return true;
    }

  if (maybe_eq (div, 0))
    return false;

  if (!multiple_p (val, div, &cst))
    return false;

  if (*mult_set && maybe_ne (*mult, cst))
    return false;

  *mult_set = true;
  *mult = cst;
  return true;
}

/* Returns true if VAL = X * DIV for some constant X.  If this is the case,
   X is stored to MULT.  */

bool
aff_combination_constant_multiple_p (aff_tree *val, aff_tree *div,
				     poly_widest_int *mult)
{
  bool mult_set = false;
  unsigned i;

  if (val->n == 0 && known_eq (val->offset, 0))
    {
      *mult = 0;
      return true;
    }
  if (val->n != div->n)
    return false;

  if (val->rest || div->rest)
    return false;

  if (!wide_int_constant_multiple_p (val->offset, div->offset,
				     &mult_set, mult))
    return false;

  for (i = 0; i < div->n; i++)
    {
      struct aff_comb_elt *elt
	= aff_combination_find_elt (val, div->elts[i].val, NULL);
      if (!elt)
	return false;
      if (!wide_int_constant_multiple_p (elt->coef, div->elts[i].coef,
					 &mult_set, mult))
	return false;
    }

  gcc_assert (mult_set);
  return true;
}

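/* Illustrative example: VAL = { offset = 8, x * 4 } is a constant
   multiple of DIV = { offset = 2, x * 1 } with *MULT set to 4, whereas
   { offset = 8, x * 3 } is not, because the per-term factors
   disagree.  */
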
/* Prints the affine VAL to the FILE.  */

static void
print_aff (FILE *file, aff_tree *val)
{
  unsigned i;
  signop sgn = TYPE_SIGN (val->type);
  if (POINTER_TYPE_P (val->type))
    sgn = SIGNED;
  fprintf (file, "{\n  type = ");
  print_generic_expr (file, val->type, TDF_VOPS|TDF_MEMSYMS);
  fprintf (file, "\n  offset = ");
  print_dec (val->offset, file, sgn);
  if (val->n > 0)
    {
      fprintf (file, "\n  elements = {\n");
      for (i = 0; i < val->n; i++)
	{
	  fprintf (file, "    [%d] = ", i);
	  print_generic_expr (file, val->elts[i].val, TDF_VOPS|TDF_MEMSYMS);

	  fprintf (file, " * ");
	  print_dec (val->elts[i].coef, file, sgn);
	  if (i != val->n - 1)
	    fprintf (file, ", \n");
	}
      fprintf (file, "\n  }");
    }
  if (val->rest)
    {
      fprintf (file, "\n  rest = ");
      print_generic_expr (file, val->rest, TDF_VOPS|TDF_MEMSYMS);
    }
  fprintf (file, "\n}");
}

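/* Illustrative example: for the combination 4 + 2 * x of type int,
   print_aff emits roughly

     {
       type = int
       offset = 4
       elements = {
	 [0] = x * 2
       }
     }  */
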
/* Prints the affine VAL to the standard error, used for debugging.  */

DEBUG_FUNCTION void
debug_aff (aff_tree *val)
{
  print_aff (stderr, val);
  fprintf (stderr, "\n");
}

/* Computes address of the reference REF in ADDR.  The size of the accessed
   location is stored to SIZE.  Returns the ultimate containing object to
   which REF refers.  */

tree
get_inner_reference_aff (tree ref, aff_tree *addr, poly_widest_int *size)
{
  poly_int64 bitsize, bitpos;
  tree toff;
  machine_mode mode;
  int uns, rev, vol;
  aff_tree tmp;
  tree base = get_inner_reference (ref, &bitsize, &bitpos, &toff, &mode,
				   &uns, &rev, &vol);
  tree base_addr = build_fold_addr_expr (base);

  /* ADDR = &BASE + TOFF + BITPOS / BITS_PER_UNIT.  */

  tree_to_aff_combination (base_addr, sizetype, addr);

  if (toff)
    {
      tree_to_aff_combination (toff, sizetype, &tmp);
      aff_combination_add (addr, &tmp);
    }

  aff_combination_const (&tmp, sizetype, bits_to_bytes_round_down (bitpos));
  aff_combination_add (addr, &tmp);

  *size = bits_to_bytes_round_up (bitsize);

  return base;
}

/* Returns true if a region of size SIZE1 at position 0 and a region of
   size SIZE2 at position DIFF cannot overlap.  */

bool
aff_comb_cannot_overlap_p (aff_tree *diff, const poly_widest_int &size1,
			   const poly_widest_int &size2)
{
  /* Unless the difference is a constant, we fail.  */
  if (diff->n != 0)
    return false;

  if (!ordered_p (diff->offset, 0))
    return false;

  if (maybe_lt (diff->offset, 0))
    /* The second object is before the first one, we succeed if the last
       element of the second object is before the start of the first one.  */
    return known_le (diff->offset + size2, 0);
  else
    /* We succeed if the second object starts after the first one ends.  */
    return known_le (size1, diff->offset);
}
