/* Operations with affine combinations of trees.
   Copyright (C) 2005-2023 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "ssa.h"
#include "tree-pretty-print.h"
#include "fold-const.h"
#include "tree-affine.h"
#include "gimplify.h"
#include "dumpfile.h"
#include "cfgexpand.h"
#include "value-query.h"
36 /* Extends CST as appropriate for the affine combinations COMB. */
39 wide_int_ext_for_comb (const widest_int
&cst
, tree type
)
41 return wi::sext (cst
, TYPE_PRECISION (type
));
44 /* Likewise for polynomial offsets. */
46 static poly_widest_int
47 wide_int_ext_for_comb (const poly_widest_int
&cst
, tree type
)
49 return wi::sext (cst
, TYPE_PRECISION (type
));
52 /* Initializes affine combination COMB so that its value is zero in TYPE. */
55 aff_combination_zero (aff_tree
*comb
, tree type
)
61 for (i
= 0; i
< MAX_AFF_ELTS
; i
++)
62 comb
->elts
[i
].coef
= 0;
63 comb
->rest
= NULL_TREE
;
66 /* Sets COMB to CST. */
69 aff_combination_const (aff_tree
*comb
, tree type
, const poly_widest_int
&cst
)
71 aff_combination_zero (comb
, type
);
72 comb
->offset
= wide_int_ext_for_comb (cst
, comb
->type
);;
75 /* Sets COMB to single element ELT. */
78 aff_combination_elt (aff_tree
*comb
, tree type
, tree elt
)
80 aff_combination_zero (comb
, type
);
83 comb
->elts
[0].val
= elt
;
84 comb
->elts
[0].coef
= 1;
87 /* Scales COMB by SCALE. */
90 aff_combination_scale (aff_tree
*comb
, const widest_int
&scale_in
)
94 widest_int scale
= wide_int_ext_for_comb (scale_in
, comb
->type
);
100 aff_combination_zero (comb
, comb
->type
);
104 comb
->offset
= wide_int_ext_for_comb (scale
* comb
->offset
, comb
->type
);
105 for (i
= 0, j
= 0; i
< comb
->n
; i
++)
108 = wide_int_ext_for_comb (scale
* comb
->elts
[i
].coef
, comb
->type
);
109 /* A coefficient may become zero due to overflow. Remove the zero
113 comb
->elts
[j
].coef
= new_coef
;
114 comb
->elts
[j
].val
= comb
->elts
[i
].val
;
121 tree type
= comb
->type
;
122 if (POINTER_TYPE_P (type
))
124 if (comb
->n
< MAX_AFF_ELTS
)
126 comb
->elts
[comb
->n
].coef
= scale
;
127 comb
->elts
[comb
->n
].val
= comb
->rest
;
128 comb
->rest
= NULL_TREE
;
132 comb
->rest
= fold_build2 (MULT_EXPR
, type
, comb
->rest
,
133 wide_int_to_tree (type
, scale
));
137 /* Adds ELT * SCALE to COMB. */
140 aff_combination_add_elt (aff_tree
*comb
, tree elt
, const widest_int
&scale_in
)
145 widest_int scale
= wide_int_ext_for_comb (scale_in
, comb
->type
);
149 for (i
= 0; i
< comb
->n
; i
++)
150 if (operand_equal_p (comb
->elts
[i
].val
, elt
, 0))
153 = wide_int_ext_for_comb (comb
->elts
[i
].coef
+ scale
, comb
->type
);
156 comb
->elts
[i
].coef
= new_coef
;
161 comb
->elts
[i
] = comb
->elts
[comb
->n
];
165 gcc_assert (comb
->n
== MAX_AFF_ELTS
- 1);
166 comb
->elts
[comb
->n
].coef
= 1;
167 comb
->elts
[comb
->n
].val
= comb
->rest
;
168 comb
->rest
= NULL_TREE
;
173 if (comb
->n
< MAX_AFF_ELTS
)
175 comb
->elts
[comb
->n
].coef
= scale
;
176 comb
->elts
[comb
->n
].val
= elt
;
182 if (POINTER_TYPE_P (type
))
186 elt
= fold_convert (type
, elt
);
188 elt
= fold_build2 (MULT_EXPR
, type
,
189 fold_convert (type
, elt
),
190 wide_int_to_tree (type
, scale
));
193 comb
->rest
= fold_build2 (PLUS_EXPR
, type
, comb
->rest
,
202 aff_combination_add_cst (aff_tree
*c
, const poly_widest_int
&cst
)
204 c
->offset
= wide_int_ext_for_comb (c
->offset
+ cst
, c
->type
);
207 /* Adds COMB2 to COMB1. */
210 aff_combination_add (aff_tree
*comb1
, aff_tree
*comb2
)
214 aff_combination_add_cst (comb1
, comb2
->offset
);
215 for (i
= 0; i
< comb2
->n
; i
++)
216 aff_combination_add_elt (comb1
, comb2
->elts
[i
].val
, comb2
->elts
[i
].coef
);
218 aff_combination_add_elt (comb1
, comb2
->rest
, 1);
221 /* Converts affine combination COMB to TYPE. */
224 aff_combination_convert (aff_tree
*comb
, tree type
)
227 tree comb_type
= comb
->type
;
229 if (TYPE_PRECISION (type
) > TYPE_PRECISION (comb_type
))
231 tree val
= fold_convert (type
, aff_combination_to_tree (comb
));
232 tree_to_aff_combination (val
, type
, comb
);
237 if (comb
->rest
&& !POINTER_TYPE_P (type
))
238 comb
->rest
= fold_convert (type
, comb
->rest
);
240 if (TYPE_PRECISION (type
) == TYPE_PRECISION (comb_type
))
243 comb
->offset
= wide_int_ext_for_comb (comb
->offset
, comb
->type
);
244 for (i
= j
= 0; i
< comb
->n
; i
++)
246 if (comb
->elts
[i
].coef
== 0)
248 comb
->elts
[j
].coef
= comb
->elts
[i
].coef
;
249 comb
->elts
[j
].val
= fold_convert (type
, comb
->elts
[i
].val
);
254 if (comb
->n
< MAX_AFF_ELTS
&& comb
->rest
)
256 comb
->elts
[comb
->n
].coef
= 1;
257 comb
->elts
[comb
->n
].val
= comb
->rest
;
258 comb
->rest
= NULL_TREE
;
263 /* Tries to handle OP0 CODE OP1 as affine combination of parts. Returns
264 true when that was successful and returns the combination in COMB. */
267 expr_to_aff_combination (aff_tree
*comb
, tree_code code
, tree type
,
268 tree op0
, tree op1
= NULL_TREE
)
271 poly_int64 bitpos
, bitsize
, bytepos
;
275 case POINTER_PLUS_EXPR
:
276 tree_to_aff_combination (op0
, type
, comb
);
277 tree_to_aff_combination (op1
, sizetype
, &tmp
);
278 aff_combination_add (comb
, &tmp
);
283 tree_to_aff_combination (op0
, type
, comb
);
284 tree_to_aff_combination (op1
, type
, &tmp
);
285 if (code
== MINUS_EXPR
)
286 aff_combination_scale (&tmp
, -1);
287 aff_combination_add (comb
, &tmp
);
291 if (TREE_CODE (op1
) != INTEGER_CST
)
293 tree_to_aff_combination (op0
, type
, comb
);
294 aff_combination_scale (comb
, wi::to_widest (op1
));
298 tree_to_aff_combination (op0
, type
, comb
);
299 aff_combination_scale (comb
, -1);
304 tree_to_aff_combination (op0
, type
, comb
);
305 aff_combination_scale (comb
, -1);
306 aff_combination_add_cst (comb
, -1);
313 tree itype
= TREE_TYPE (inner
);
314 enum tree_code icode
= TREE_CODE (inner
);
317 if (tree_nop_conversion_p (otype
, itype
))
319 tree_to_aff_combination (op0
, type
, comb
);
323 /* In principle this is a valid folding, but it isn't necessarily
324 an optimization, so do it here and not in fold_unary. */
325 if ((icode
== PLUS_EXPR
|| icode
== MINUS_EXPR
|| icode
== MULT_EXPR
)
326 && TREE_CODE (itype
) == INTEGER_TYPE
327 && TREE_CODE (otype
) == INTEGER_TYPE
328 && TYPE_PRECISION (otype
) > TYPE_PRECISION (itype
))
330 tree op0
= TREE_OPERAND (inner
, 0), op1
= TREE_OPERAND (inner
, 1);
332 /* If inner type has undefined overflow behavior, fold conversion
334 (T1)(X *+- CST) -> (T1)X *+- (T1)CST
335 (T1)(X + X) -> (T1)X + (T1)X. */
336 if (TYPE_OVERFLOW_UNDEFINED (itype
)
337 && (TREE_CODE (op1
) == INTEGER_CST
338 || (icode
== PLUS_EXPR
&& operand_equal_p (op0
, op1
, 0))))
340 op0
= fold_convert (otype
, op0
);
341 op1
= fold_convert (otype
, op1
);
342 return expr_to_aff_combination (comb
, icode
, otype
, op0
, op1
);
345 /* If inner type has wrapping overflow behavior, fold conversion
347 (T1)(X *+- CST) -> (T1)X *+- (T1)CST
348 if X *+- CST doesn't overflow by range information. */
350 if (TYPE_UNSIGNED (itype
)
351 && TYPE_OVERFLOW_WRAPS (itype
)
352 && TREE_CODE (op1
) == INTEGER_CST
353 && get_range_query (cfun
)->range_of_expr (vr
, op0
)
355 && !vr
.undefined_p ())
357 wide_int minv
= vr
.lower_bound ();
358 wide_int maxv
= vr
.upper_bound ();
359 wi::overflow_type overflow
= wi::OVF_NONE
;
360 signop sign
= UNSIGNED
;
361 if (icode
== PLUS_EXPR
)
362 wi::add (maxv
, wi::to_wide (op1
), sign
, &overflow
);
363 else if (icode
== MULT_EXPR
)
364 wi::mul (maxv
, wi::to_wide (op1
), sign
, &overflow
);
366 wi::sub (minv
, wi::to_wide (op1
), sign
, &overflow
);
368 if (overflow
== wi::OVF_NONE
)
370 op0
= fold_convert (otype
, op0
);
371 op1
= fold_convert (otype
, op1
);
372 return expr_to_aff_combination (comb
, icode
, otype
, op0
,
386 /* Splits EXPR into an affine combination of parts. */
389 tree_to_aff_combination (tree expr
, tree type
, aff_tree
*comb
)
394 poly_int64 bitpos
, bitsize
, bytepos
;
396 int unsignedp
, reversep
, volatilep
;
400 code
= TREE_CODE (expr
);
403 case POINTER_PLUS_EXPR
:
407 if (expr_to_aff_combination (comb
, code
, type
, TREE_OPERAND (expr
, 0),
408 TREE_OPERAND (expr
, 1)))
414 if (expr_to_aff_combination (comb
, code
, type
, TREE_OPERAND (expr
, 0)))
419 /* ??? TREE_TYPE (expr) should be equal to type here, but IVOPTS
420 calls this with not showing an outer widening cast. */
421 if (expr_to_aff_combination (comb
, code
,
422 TREE_TYPE (expr
), TREE_OPERAND (expr
, 0)))
424 aff_combination_convert (comb
, type
);
430 /* Handle &MEM[ptr + CST] which is equivalent to POINTER_PLUS_EXPR. */
431 if (TREE_CODE (TREE_OPERAND (expr
, 0)) == MEM_REF
)
433 expr
= TREE_OPERAND (expr
, 0);
434 tree_to_aff_combination (TREE_OPERAND (expr
, 0), type
, comb
);
435 tree_to_aff_combination (TREE_OPERAND (expr
, 1), sizetype
, &tmp
);
436 aff_combination_add (comb
, &tmp
);
439 core
= get_inner_reference (TREE_OPERAND (expr
, 0), &bitsize
, &bitpos
,
440 &toffset
, &mode
, &unsignedp
, &reversep
,
442 if (!multiple_p (bitpos
, BITS_PER_UNIT
, &bytepos
))
444 aff_combination_const (comb
, type
, bytepos
);
445 if (TREE_CODE (core
) == MEM_REF
)
447 tree mem_offset
= TREE_OPERAND (core
, 1);
448 aff_combination_add_cst (comb
, wi::to_poly_widest (mem_offset
));
449 core
= TREE_OPERAND (core
, 0);
452 core
= build_fold_addr_expr (core
);
454 if (TREE_CODE (core
) == ADDR_EXPR
)
455 aff_combination_add_elt (comb
, core
, 1);
458 tree_to_aff_combination (core
, type
, &tmp
);
459 aff_combination_add (comb
, &tmp
);
463 tree_to_aff_combination (toffset
, type
, &tmp
);
464 aff_combination_add (comb
, &tmp
);
470 if (poly_int_tree_p (expr
))
472 aff_combination_const (comb
, type
, wi::to_poly_widest (expr
));
479 aff_combination_elt (comb
, type
, expr
);
482 /* Creates EXPR + ELT * SCALE in TYPE. EXPR is taken from affine
486 add_elt_to_tree (tree expr
, tree type
, tree elt
, const widest_int
&scale_in
)
490 widest_int scale
= wide_int_ext_for_comb (scale_in
, type
);
492 elt
= fold_convert (type
, elt
);
498 return fold_build2 (PLUS_EXPR
, type
, expr
, elt
);
504 return fold_build1 (NEGATE_EXPR
, type
, elt
);
506 return fold_build2 (MINUS_EXPR
, type
, expr
, elt
);
510 return fold_build2 (MULT_EXPR
, type
, elt
, wide_int_to_tree (type
, scale
));
512 if (wi::neg_p (scale
))
520 elt
= fold_build2 (MULT_EXPR
, type
, elt
, wide_int_to_tree (type
, scale
));
521 return fold_build2 (code
, type
, expr
, elt
);
524 /* Makes tree from the affine combination COMB. */
527 aff_combination_to_tree (aff_tree
*comb
)
529 tree type
= comb
->type
, base
= NULL_TREE
, expr
= NULL_TREE
;
534 gcc_assert (comb
->n
== MAX_AFF_ELTS
|| comb
->rest
== NULL_TREE
);
537 if (POINTER_TYPE_P (type
))
540 if (comb
->n
> 0 && comb
->elts
[0].coef
== 1
541 && POINTER_TYPE_P (TREE_TYPE (comb
->elts
[0].val
)))
543 base
= comb
->elts
[0].val
;
548 for (; i
< comb
->n
; i
++)
549 expr
= add_elt_to_tree (expr
, type
, comb
->elts
[i
].val
, comb
->elts
[i
].coef
);
552 expr
= add_elt_to_tree (expr
, type
, comb
->rest
, 1);
554 /* Ensure that we get x - 1, not x + (-1) or x + 0xff..f if x is
556 if (known_lt (comb
->offset
, 0))
566 expr
= add_elt_to_tree (expr
, type
, wide_int_to_tree (type
, off
), sgn
);
569 return fold_build_pointer_plus (base
, expr
);
571 return fold_convert (comb
->type
, expr
);
574 /* Copies the tree elements of COMB to ensure that they are not shared. */
577 unshare_aff_combination (aff_tree
*comb
)
581 for (i
= 0; i
< comb
->n
; i
++)
582 comb
->elts
[i
].val
= unshare_expr (comb
->elts
[i
].val
);
584 comb
->rest
= unshare_expr (comb
->rest
);
587 /* Remove M-th element from COMB. */
590 aff_combination_remove_elt (aff_tree
*comb
, unsigned m
)
594 comb
->elts
[m
] = comb
->elts
[comb
->n
];
597 comb
->elts
[comb
->n
].coef
= 1;
598 comb
->elts
[comb
->n
].val
= comb
->rest
;
599 comb
->rest
= NULL_TREE
;
604 /* Adds C * COEF * VAL to R. VAL may be NULL, in that case only
605 C * COEF is added to R. */
609 aff_combination_add_product (aff_tree
*c
, const widest_int
&coef
, tree val
,
615 for (i
= 0; i
< c
->n
; i
++)
617 aval
= c
->elts
[i
].val
;
620 type
= TREE_TYPE (aval
);
621 aval
= fold_build2 (MULT_EXPR
, type
, aval
,
622 fold_convert (type
, val
));
625 aff_combination_add_elt (r
, aval
, coef
* c
->elts
[i
].coef
);
633 type
= TREE_TYPE (aval
);
634 aval
= fold_build2 (MULT_EXPR
, type
, aval
,
635 fold_convert (type
, val
));
638 aff_combination_add_elt (r
, aval
, coef
);
643 if (c
->offset
.is_constant ())
644 /* Access coeffs[0] directly, for efficiency. */
645 aff_combination_add_elt (r
, val
, coef
* c
->offset
.coeffs
[0]);
648 /* c->offset is polynomial, so multiply VAL rather than COEF
650 tree offset
= wide_int_to_tree (TREE_TYPE (val
), c
->offset
);
651 val
= fold_build2 (MULT_EXPR
, TREE_TYPE (val
), val
, offset
);
652 aff_combination_add_elt (r
, val
, coef
);
656 aff_combination_add_cst (r
, coef
* c
->offset
);
659 /* Multiplies C1 by C2, storing the result to R */
662 aff_combination_mult (aff_tree
*c1
, aff_tree
*c2
, aff_tree
*r
)
665 gcc_assert (TYPE_PRECISION (c1
->type
) == TYPE_PRECISION (c2
->type
));
667 aff_combination_zero (r
, c1
->type
);
669 for (i
= 0; i
< c2
->n
; i
++)
670 aff_combination_add_product (c1
, c2
->elts
[i
].coef
, c2
->elts
[i
].val
, r
);
672 aff_combination_add_product (c1
, 1, c2
->rest
, r
);
673 if (c2
->offset
.is_constant ())
674 /* Access coeffs[0] directly, for efficiency. */
675 aff_combination_add_product (c1
, c2
->offset
.coeffs
[0], NULL
, r
);
678 /* c2->offset is polynomial, so do the multiplication in tree form. */
679 tree offset
= wide_int_to_tree (c2
->type
, c2
->offset
);
680 aff_combination_add_product (c1
, 1, offset
, r
);
684 /* Returns the element of COMB whose value is VAL, or NULL if no such
685 element exists. If IDX is not NULL, it is set to the index of VAL in
688 static class aff_comb_elt
*
689 aff_combination_find_elt (aff_tree
*comb
, tree val
, unsigned *idx
)
693 for (i
= 0; i
< comb
->n
; i
++)
694 if (operand_equal_p (comb
->elts
[i
].val
, val
, 0))
699 return &comb
->elts
[i
];
705 /* Element of the cache that maps ssa name NAME to its expanded form
706 as an affine expression EXPANSION. */
713 /* True if the expansion for the name is just being generated. */
714 unsigned in_progress
: 1;
717 /* Expands SSA names in COMB recursively. CACHE is used to cache the
721 aff_combination_expand (aff_tree
*comb ATTRIBUTE_UNUSED
,
722 hash_map
<tree
, name_expansion
*> **cache
)
725 aff_tree to_add
, current
, curre
;
729 class name_expansion
*exp
;
731 aff_combination_zero (&to_add
, comb
->type
);
732 for (i
= 0; i
< comb
->n
; i
++)
737 e
= comb
->elts
[i
].val
;
738 type
= TREE_TYPE (e
);
740 /* Look through some conversions. */
741 if (CONVERT_EXPR_P (e
)
742 && (TYPE_PRECISION (type
)
743 >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (e
, 0)))))
744 name
= TREE_OPERAND (e
, 0);
745 if (TREE_CODE (name
) != SSA_NAME
)
747 def
= SSA_NAME_DEF_STMT (name
);
748 if (!is_gimple_assign (def
) || gimple_assign_lhs (def
) != name
)
751 code
= gimple_assign_rhs_code (def
);
753 && !IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code
))
754 && (get_gimple_rhs_class (code
) != GIMPLE_SINGLE_RHS
755 || !is_gimple_min_invariant (gimple_assign_rhs1 (def
))))
758 /* We do not know whether the reference retains its value at the
759 place where the expansion is used. */
760 if (TREE_CODE_CLASS (code
) == tcc_reference
)
763 name_expansion
**slot
= NULL
;
765 slot
= (*cache
)->get (name
);
766 exp
= slot
? *slot
: NULL
;
769 /* Only bother to handle cases tree_to_aff_combination will. */
772 case POINTER_PLUS_EXPR
:
776 if (!expr_to_aff_combination (¤t
, code
, TREE_TYPE (name
),
777 gimple_assign_rhs1 (def
),
778 gimple_assign_rhs2 (def
)))
783 if (!expr_to_aff_combination (¤t
, code
, TREE_TYPE (name
),
784 gimple_assign_rhs1 (def
)))
788 if (!expr_to_aff_combination (¤t
, code
, TREE_TYPE (name
),
789 gimple_assign_rhs1 (def
)))
790 /* This makes us always expand conversions which we did
791 in the past and makes gcc.dg/tree-ssa/ivopts-lt-2.c
792 PASS, eliminating one induction variable in IVOPTs.
793 ??? But it is really excessive and we should try
794 harder to do without it. */
795 aff_combination_elt (¤t
, TREE_TYPE (name
),
796 fold_convert (TREE_TYPE (name
),
797 gimple_assign_rhs1 (def
)));
802 tree_to_aff_combination (gimple_assign_rhs1 (def
),
803 TREE_TYPE (name
), ¤t
);
808 exp
= XNEW (class name_expansion
);
809 exp
->in_progress
= 1;
811 *cache
= new hash_map
<tree
, name_expansion
*>;
812 (*cache
)->put (name
, exp
);
813 aff_combination_expand (¤t
, cache
);
814 exp
->expansion
= current
;
815 exp
->in_progress
= 0;
819 /* Since we follow the definitions in the SSA form, we should not
820 enter a cycle unless we pass through a phi node. */
821 gcc_assert (!exp
->in_progress
);
822 current
= exp
->expansion
;
824 if (!useless_type_conversion_p (comb
->type
, current
.type
))
825 aff_combination_convert (¤t
, comb
->type
);
827 /* Accumulate the new terms to TO_ADD, so that we do not modify
828 COMB while traversing it; include the term -coef * E, to remove
830 scale
= comb
->elts
[i
].coef
;
831 aff_combination_zero (&curre
, comb
->type
);
832 aff_combination_add_elt (&curre
, e
, -scale
);
833 aff_combination_scale (¤t
, scale
);
834 aff_combination_add (&to_add
, ¤t
);
835 aff_combination_add (&to_add
, &curre
);
837 aff_combination_add (comb
, &to_add
);
840 /* Similar to tree_to_aff_combination, but follows SSA name definitions
841 and expands them recursively. CACHE is used to cache the expansions
842 of the ssa names, to avoid exponential time complexity for cases
851 tree_to_aff_combination_expand (tree expr
, tree type
, aff_tree
*comb
,
852 hash_map
<tree
, name_expansion
*> **cache
)
854 tree_to_aff_combination (expr
, type
, comb
);
855 aff_combination_expand (comb
, cache
);
858 /* Frees memory occupied by struct name_expansion in *VALUE. Callback for
859 hash_map::traverse. */
862 free_name_expansion (tree
const &, name_expansion
**value
, void *)
868 /* Frees memory allocated for the CACHE used by
869 tree_to_aff_combination_expand. */
872 free_affine_expand_cache (hash_map
<tree
, name_expansion
*> **cache
)
877 (*cache
)->traverse
<void *, free_name_expansion
> (NULL
);
882 /* If VAL != CST * DIV for any constant CST, returns false.
883 Otherwise, if *MULT_SET is true, additionally compares CST and MULT,
884 and if they are different, returns false. Finally, if neither of these
885 two cases occur, true is returned, and CST is stored to MULT and MULT_SET
889 wide_int_constant_multiple_p (const poly_widest_int
&val
,
890 const poly_widest_int
&div
,
891 bool *mult_set
, poly_widest_int
*mult
)
893 poly_widest_int rem
, cst
;
895 if (known_eq (val
, 0))
897 if (*mult_set
&& maybe_ne (*mult
, 0))
904 if (maybe_eq (div
, 0))
907 if (!multiple_p (val
, div
, &cst
))
910 if (*mult_set
&& maybe_ne (*mult
, cst
))
918 /* Returns true if VAL = X * DIV for some constant X. If this is the case,
919 X is stored to MULT. */
922 aff_combination_constant_multiple_p (aff_tree
*val
, aff_tree
*div
,
923 poly_widest_int
*mult
)
925 bool mult_set
= false;
928 if (val
->n
== 0 && known_eq (val
->offset
, 0))
933 if (val
->n
!= div
->n
)
936 if (val
->rest
|| div
->rest
)
939 if (!wide_int_constant_multiple_p (val
->offset
, div
->offset
,
943 for (i
= 0; i
< div
->n
; i
++)
945 class aff_comb_elt
*elt
946 = aff_combination_find_elt (val
, div
->elts
[i
].val
, NULL
);
949 if (!wide_int_constant_multiple_p (elt
->coef
, div
->elts
[i
].coef
,
954 gcc_assert (mult_set
);
958 /* Prints the affine VAL to the FILE. */
961 print_aff (FILE *file
, aff_tree
*val
)
964 signop sgn
= TYPE_SIGN (val
->type
);
965 if (POINTER_TYPE_P (val
->type
))
967 fprintf (file
, "{\n type = ");
968 print_generic_expr (file
, val
->type
, TDF_VOPS
|TDF_MEMSYMS
);
969 fprintf (file
, "\n offset = ");
970 print_dec (val
->offset
, file
, sgn
);
973 fprintf (file
, "\n elements = {\n");
974 for (i
= 0; i
< val
->n
; i
++)
976 fprintf (file
, " [%d] = ", i
);
977 print_generic_expr (file
, val
->elts
[i
].val
, TDF_VOPS
|TDF_MEMSYMS
);
979 fprintf (file
, " * ");
980 print_dec (val
->elts
[i
].coef
, file
, sgn
);
982 fprintf (file
, ", \n");
984 fprintf (file
, "\n }");
988 fprintf (file
, "\n rest = ");
989 print_generic_expr (file
, val
->rest
, TDF_VOPS
|TDF_MEMSYMS
);
991 fprintf (file
, "\n}");
994 /* Prints the affine VAL to the standard error, used for debugging. */
997 debug_aff (aff_tree
*val
)
999 print_aff (stderr
, val
);
1000 fprintf (stderr
, "\n");
1003 /* Computes address of the reference REF in ADDR. The size of the accessed
1004 location is stored to SIZE. Returns the ultimate containing object to
1005 which REF refers. */
1008 get_inner_reference_aff (tree ref
, aff_tree
*addr
, poly_widest_int
*size
)
1010 poly_int64 bitsize
, bitpos
;
1015 tree base
= get_inner_reference (ref
, &bitsize
, &bitpos
, &toff
, &mode
,
1017 tree base_addr
= build_fold_addr_expr (base
);
1019 /* ADDR = &BASE + TOFF + BITPOS / BITS_PER_UNIT. */
1021 tree_to_aff_combination (base_addr
, sizetype
, addr
);
1025 tree_to_aff_combination (toff
, sizetype
, &tmp
);
1026 aff_combination_add (addr
, &tmp
);
1029 aff_combination_const (&tmp
, sizetype
, bits_to_bytes_round_down (bitpos
));
1030 aff_combination_add (addr
, &tmp
);
1032 *size
= bits_to_bytes_round_up (bitsize
);
1037 /* Returns true if a region of size SIZE1 at position 0 and a region of
1038 size SIZE2 at position DIFF cannot overlap. */
1041 aff_comb_cannot_overlap_p (aff_tree
*diff
, const poly_widest_int
&size1
,
1042 const poly_widest_int
&size2
)
1044 /* Unless the difference is a constant, we fail. */
1048 if (!ordered_p (diff
->offset
, 0))
1051 if (maybe_lt (diff
->offset
, 0))
1053 /* The second object is before the first one, we succeed if the last
1054 element of the second object is before the start of the first one. */
1055 return known_le (diff
->offset
+ size2
, 0);
1059 /* We succeed if the second object starts after the first one ends. */
1060 return known_le (size1
, diff
->offset
);