/* Operations with affine combinations of trees.
   Copyright (C) 2005-2017 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pretty-print.h"
#include "fold-const.h"
#include "tree-affine.h"
#include "gimplify.h"
#include "dumpfile.h"
#include "cfgexpand.h"
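
/* An affine combination COMB (struct aff_tree, see tree-affine.h)
   represents the value

     COMB.offset + COMB.elts[0].coef * COMB.elts[0].val
		 + ...
		 + COMB.elts[COMB.n - 1].coef * COMB.elts[COMB.n - 1].val
		 + COMB.rest

   For illustration (a hypothetical example, not taken from this file):
   the expression &a + 4 * i - 7 would be represented with offset = -7,
   n = 2, elts[0] = { &a, 1 }, elts[1] = { i, 4 } and rest = NULL_TREE.  */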

/* Extends CST as appropriate for an affine combination of type TYPE.  */

static widest_int
wide_int_ext_for_comb (const widest_int &cst, tree type)
{
  return wi::sext (cst, TYPE_PRECISION (type));
}

/* Initializes affine combination COMB so that its value is zero in TYPE.  */

static void
aff_combination_zero (aff_tree *comb, tree type)
{
  int i;

  comb->type = type;
  comb->offset = 0;
  comb->n = 0;
  for (i = 0; i < MAX_AFF_ELTS; i++)
    comb->elts[i].coef = 0;
  comb->rest = NULL_TREE;
}

/* Sets COMB to CST.  */

void
aff_combination_const (aff_tree *comb, tree type, const widest_int &cst)
{
  aff_combination_zero (comb, type);
  comb->offset = wide_int_ext_for_comb (cst, comb->type);
}

/* Sets COMB to single element ELT.  */

void
aff_combination_elt (aff_tree *comb, tree type, tree elt)
{
  aff_combination_zero (comb, type);

  comb->n = 1;
  comb->elts[0].val = elt;
  comb->elts[0].coef = 1;
}

/* Scales COMB by SCALE.  */

void
aff_combination_scale (aff_tree *comb, const widest_int &scale_in)
{
  unsigned i, j;

  widest_int scale = wide_int_ext_for_comb (scale_in, comb->type);
  if (scale == 1)
    return;

  if (scale == 0)
    {
      aff_combination_zero (comb, comb->type);
      return;
    }

  comb->offset = wide_int_ext_for_comb (scale * comb->offset, comb->type);
  for (i = 0, j = 0; i < comb->n; i++)
    {
      widest_int new_coef
	= wide_int_ext_for_comb (scale * comb->elts[i].coef, comb->type);
      /* A coefficient may become zero due to overflow.  Remove the zero
	 elements.  */
      if (new_coef == 0)
	continue;
      comb->elts[j].coef = new_coef;
      comb->elts[j].val = comb->elts[i].val;
      j++;
    }
  comb->n = j;

  if (comb->rest)
    {
      tree type = comb->type;
      if (POINTER_TYPE_P (type))
	type = sizetype;
      if (comb->n < MAX_AFF_ELTS)
	{
	  comb->elts[comb->n].coef = scale;
	  comb->elts[comb->n].val = comb->rest;
	  comb->rest = NULL_TREE;
	  comb->n++;
	}
      else
	comb->rest = fold_build2 (MULT_EXPR, type, comb->rest,
				  wide_int_to_tree (type, scale));
    }
}
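
/* For illustration (hypothetical values, not from this file): scaling
   the combination 3 + 2*x by -2 yields -6 + (-4)*x.  A coefficient that
   wraps around to zero in the precision of COMB->type is dropped, which
   is why the loop above compacts the surviving elements with J.  */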

/* Adds ELT * SCALE to COMB.  */

void
aff_combination_add_elt (aff_tree *comb, tree elt, const widest_int &scale_in)
{
  unsigned i;
  tree type;

  widest_int scale = wide_int_ext_for_comb (scale_in, comb->type);
  if (scale == 0)
    return;

  for (i = 0; i < comb->n; i++)
    if (operand_equal_p (comb->elts[i].val, elt, 0))
      {
	widest_int new_coef
	  = wide_int_ext_for_comb (comb->elts[i].coef + scale, comb->type);
	if (new_coef != 0)
	  {
	    comb->elts[i].coef = new_coef;
	    return;
	  }

	comb->n--;
	comb->elts[i] = comb->elts[comb->n];

	if (comb->rest)
	  {
	    gcc_assert (comb->n == MAX_AFF_ELTS - 1);
	    comb->elts[comb->n].coef = 1;
	    comb->elts[comb->n].val = comb->rest;
	    comb->rest = NULL_TREE;
	    comb->n++;
	  }
	return;
      }
  if (comb->n < MAX_AFF_ELTS)
    {
      comb->elts[comb->n].coef = scale;
      comb->elts[comb->n].val = elt;
      comb->n++;
      return;
    }

  type = comb->type;
  if (POINTER_TYPE_P (type))
    type = sizetype;

  if (scale == 1)
    elt = fold_convert (type, elt);
  else
    elt = fold_build2 (MULT_EXPR, type,
		       fold_convert (type, elt),
		       wide_int_to_tree (type, scale));

  if (comb->rest)
    comb->rest = fold_build2 (PLUS_EXPR, type, comb->rest,
			      elt);
  else
    comb->rest = elt;
}
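
/* For illustration (hypothetical, not from this file): adding 2 * x to
   a combination that already contains 3 * x merely updates the
   coefficient to 5; adding -3 * x to 3 * x cancels the element, and the
   freed slot may be refilled from COMB->rest.  Only when all
   MAX_AFF_ELTS slots are occupied does the new term spill into the
   symbolic sum COMB->rest.  */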

/* Adds CST to C.  */

static void
aff_combination_add_cst (aff_tree *c, const widest_int &cst)
{
  c->offset = wide_int_ext_for_comb (c->offset + cst, c->type);
}

/* Adds COMB2 to COMB1.  */

void
aff_combination_add (aff_tree *comb1, aff_tree *comb2)
{
  unsigned i;

  aff_combination_add_cst (comb1, comb2->offset);
  for (i = 0; i < comb2->n; i++)
    aff_combination_add_elt (comb1, comb2->elts[i].val, comb2->elts[i].coef);
  if (comb2->rest)
    aff_combination_add_elt (comb1, comb2->rest, 1);
}

/* Converts affine combination COMB to TYPE.  */

void
aff_combination_convert (aff_tree *comb, tree type)
{
  unsigned i, j;
  tree comb_type = comb->type;

  if (TYPE_PRECISION (type) > TYPE_PRECISION (comb_type))
    {
      tree val = fold_convert (type, aff_combination_to_tree (comb));
      tree_to_aff_combination (val, type, comb);
      return;
    }

  comb->type = type;
  if (comb->rest && !POINTER_TYPE_P (type))
    comb->rest = fold_convert (type, comb->rest);

  if (TYPE_PRECISION (type) == TYPE_PRECISION (comb_type))
    return;

  comb->offset = wide_int_ext_for_comb (comb->offset, comb->type);
  for (i = j = 0; i < comb->n; i++)
    {
      if (comb->elts[i].coef == 0)
	continue;
      comb->elts[j].coef = comb->elts[i].coef;
      comb->elts[j].val = fold_convert (type, comb->elts[i].val);
      j++;
    }

  comb->n = j;
  if (comb->n < MAX_AFF_ELTS && comb->rest)
    {
      comb->elts[comb->n].coef = 1;
      comb->elts[comb->n].val = comb->rest;
      comb->rest = NULL_TREE;
      comb->n++;
    }
}

/* Splits EXPR into an affine combination of parts.  */

void
tree_to_aff_combination (tree expr, tree type, aff_tree *comb)
{
  aff_tree tmp;
  enum tree_code code;
  tree cst, core, toffset;
  HOST_WIDE_INT bitpos, bitsize;
  machine_mode mode;
  int unsignedp, reversep, volatilep;

  STRIP_NOPS (expr);

  code = TREE_CODE (expr);
  switch (code)
    {
    case INTEGER_CST:
      aff_combination_const (comb, type, wi::to_widest (expr));
      return;

    case POINTER_PLUS_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
      aff_combination_add (comb, &tmp);
      return;

    case PLUS_EXPR:
    case MINUS_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), type, &tmp);
      if (code == MINUS_EXPR)
	aff_combination_scale (&tmp, -1);
      aff_combination_add (comb, &tmp);
      return;

    case MULT_EXPR:
      cst = TREE_OPERAND (expr, 1);
      if (TREE_CODE (cst) != INTEGER_CST)
	break;
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, wi::to_widest (cst));
      return;

    case NEGATE_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, -1);
      return;

    case BIT_NOT_EXPR:
      /* ~x = -x - 1 */
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, -1);
      aff_combination_add_cst (comb, -1);
      return;

    case ADDR_EXPR:
      /* Handle &MEM[ptr + CST] which is equivalent to POINTER_PLUS_EXPR.  */
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == MEM_REF)
	{
	  expr = TREE_OPERAND (expr, 0);
	  tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
	  tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
	  aff_combination_add (comb, &tmp);
	  return;
	}
      core = get_inner_reference (TREE_OPERAND (expr, 0), &bitsize, &bitpos,
				  &toffset, &mode, &unsignedp, &reversep,
				  &volatilep);
      if (bitpos % BITS_PER_UNIT != 0)
	break;
      aff_combination_const (comb, type, bitpos / BITS_PER_UNIT);
      if (TREE_CODE (core) == MEM_REF)
	{
	  aff_combination_add_cst (comb, wi::to_widest (TREE_OPERAND (core, 1)));
	  core = TREE_OPERAND (core, 0);
	}
      else
	core = build_fold_addr_expr (core);

      if (TREE_CODE (core) == ADDR_EXPR)
	aff_combination_add_elt (comb, core, 1);
      else
	{
	  tree_to_aff_combination (core, type, &tmp);
	  aff_combination_add (comb, &tmp);
	}
      if (toffset)
	{
	  tree_to_aff_combination (toffset, type, &tmp);
	  aff_combination_add (comb, &tmp);
	}
      return;

    case MEM_REF:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR)
	tree_to_aff_combination (TREE_OPERAND (TREE_OPERAND (expr, 0), 0),
				 type, comb);
      else if (integer_zerop (TREE_OPERAND (expr, 1)))
	{
	  aff_combination_elt (comb, type, expr);
	  return;
	}
      else
	aff_combination_elt (comb, type,
			     build2 (MEM_REF, TREE_TYPE (expr),
				     TREE_OPERAND (expr, 0),
				     build_int_cst
				      (TREE_TYPE (TREE_OPERAND (expr, 1)), 0)));
      tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
      aff_combination_add (comb, &tmp);
      return;

    default:
      break;
    }

  aff_combination_elt (comb, type, expr);
}
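
/* For illustration (hypothetical, not from this file): splitting
   (x + 3) * 4 - y yields offset = 12, elts[0] = { x, 4 } and
   elts[1] = { y, -1 }: the MULT_EXPR case recurses into x + 3 and
   scales the result by 4, and the MINUS_EXPR case adds the combination
   of y scaled by -1.  */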

/* Creates EXPR + ELT * SCALE in TYPE.  EXPR is taken from affine
   combination COMB.  */

static tree
add_elt_to_tree (tree expr, tree type, tree elt, const widest_int &scale_in)
{
  enum tree_code code;

  widest_int scale = wide_int_ext_for_comb (scale_in, type);

  elt = fold_convert (type, elt);
  if (scale == 1)
    {
      if (!expr)
	return elt;

      return fold_build2 (PLUS_EXPR, type, expr, elt);
    }

  if (scale == -1)
    {
      if (!expr)
	return fold_build1 (NEGATE_EXPR, type, elt);

      return fold_build2 (MINUS_EXPR, type, expr, elt);
    }

  if (!expr)
    return fold_build2 (MULT_EXPR, type, elt, wide_int_to_tree (type, scale));

  if (wi::neg_p (scale))
    {
      code = MINUS_EXPR;
      scale = -scale;
    }
  else
    code = PLUS_EXPR;

  elt = fold_build2 (MULT_EXPR, type, elt, wide_int_to_tree (type, scale));
  return fold_build2 (code, type, expr, elt);
}

/* Makes tree from the affine combination COMB.  */

tree
aff_combination_to_tree (aff_tree *comb)
{
  tree type = comb->type, base = NULL_TREE, expr = NULL_TREE;
  unsigned i;
  widest_int off;
  int sgn;

  gcc_assert (comb->n == MAX_AFF_ELTS || comb->rest == NULL_TREE);

  i = 0;
  if (POINTER_TYPE_P (type))
    {
      type = sizetype;
      if (comb->n > 0 && comb->elts[0].coef == 1
	  && POINTER_TYPE_P (TREE_TYPE (comb->elts[0].val)))
	{
	  base = comb->elts[0].val;
	  ++i;
	}
    }

  for (; i < comb->n; i++)
    expr = add_elt_to_tree (expr, type, comb->elts[i].val, comb->elts[i].coef);

  if (comb->rest)
    expr = add_elt_to_tree (expr, type, comb->rest, 1);

  /* Ensure that we get x - 1, not x + (-1) or x + 0xff..f if x is
     unsigned.  */
  if (wi::neg_p (comb->offset))
    {
      off = -comb->offset;
      sgn = -1;
    }
  else
    {
      off = comb->offset;
      sgn = 1;
    }
  expr = add_elt_to_tree (expr, type, wide_int_to_tree (type, off), sgn);

  if (base)
    return fold_build_pointer_plus (base, expr);
  else
    return fold_convert (comb->type, expr);
}
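
/* For illustration (hypothetical, not from this file): converting the
   combination -1 + 1*x back to a tree yields x - 1 rather than
   x + (-1), because a negative offset is emitted as a MINUS_EXPR;
   for unsigned x this avoids the constant 0xff..f.  */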

/* Copies the tree elements of COMB to ensure that they are not shared.  */

void
unshare_aff_combination (aff_tree *comb)
{
  unsigned i;

  for (i = 0; i < comb->n; i++)
    comb->elts[i].val = unshare_expr (comb->elts[i].val);
  if (comb->rest)
    comb->rest = unshare_expr (comb->rest);
}

/* Remove M-th element from COMB.  */

void
aff_combination_remove_elt (aff_tree *comb, unsigned m)
{
  comb->n--;
  if (m <= comb->n)
    comb->elts[m] = comb->elts[comb->n];
  if (comb->rest)
    {
      comb->elts[comb->n].coef = 1;
      comb->elts[comb->n].val = comb->rest;
      comb->rest = NULL_TREE;
      comb->n++;
    }
}

/* Adds C * COEF * VAL to R.  VAL may be NULL, in that case only
   C * COEF is added to R.  */

static void
aff_combination_add_product (aff_tree *c, const widest_int &coef, tree val,
			     aff_tree *r)
{
  unsigned i;
  tree aval, type;

  for (i = 0; i < c->n; i++)
    {
      aval = c->elts[i].val;
      if (val)
	{
	  type = TREE_TYPE (aval);
	  aval = fold_build2 (MULT_EXPR, type, aval,
			      fold_convert (type, val));
	}

      aff_combination_add_elt (r, aval, coef * c->elts[i].coef);
    }

  if (c->rest)
    {
      aval = c->rest;
      if (val)
	{
	  type = TREE_TYPE (aval);
	  aval = fold_build2 (MULT_EXPR, type, aval,
			      fold_convert (type, val));
	}

      aff_combination_add_elt (r, aval, coef);
    }

  if (val)
    aff_combination_add_elt (r, val, coef * c->offset);
  else
    aff_combination_add_cst (r, coef * c->offset);
}

/* Multiplies C1 by C2, storing the result to R.  */

void
aff_combination_mult (aff_tree *c1, aff_tree *c2, aff_tree *r)
{
  unsigned i;
  gcc_assert (TYPE_PRECISION (c1->type) == TYPE_PRECISION (c2->type));

  aff_combination_zero (r, c1->type);

  for (i = 0; i < c2->n; i++)
    aff_combination_add_product (c1, c2->elts[i].coef, c2->elts[i].val, r);
  if (c2->rest)
    aff_combination_add_product (c1, 1, c2->rest, r);
  aff_combination_add_product (c1, c2->offset, NULL, r);
}

/* Returns the element of COMB whose value is VAL, or NULL if no such
   element exists.  If IDX is not NULL, it is set to the index of VAL in
   COMB.  */

static struct aff_comb_elt *
aff_combination_find_elt (aff_tree *comb, tree val, unsigned *idx)
{
  unsigned i;

  for (i = 0; i < comb->n; i++)
    if (operand_equal_p (comb->elts[i].val, val, 0))
      {
	if (idx)
	  *idx = i;

	return &comb->elts[i];
      }

  return NULL;
}

/* Element of the cache that maps ssa name NAME to its expanded form
   as an affine expression EXPANSION.  */

struct name_expansion
{
  aff_tree expansion;

  /* True if the expansion for the name is just being generated.  */
  unsigned in_progress : 1;
};

/* Expands SSA names in COMB recursively.  CACHE is used to cache the
   results.  */

void
aff_combination_expand (aff_tree *comb ATTRIBUTE_UNUSED,
			hash_map<tree, name_expansion *> **cache)
{
  unsigned i;
  aff_tree to_add, current, curre;
  tree e, rhs;
  gimple *def;
  widest_int scale;
  struct name_expansion *exp;

  aff_combination_zero (&to_add, comb->type);
  for (i = 0; i < comb->n; i++)
    {
      tree type, name;
      enum tree_code code;

      e = comb->elts[i].val;
      type = TREE_TYPE (e);
      name = e;
      /* Look through some conversions.  */
      if (CONVERT_EXPR_P (e)
	  && (TYPE_PRECISION (type)
	      >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (e, 0)))))
	name = TREE_OPERAND (e, 0);
      if (TREE_CODE (name) != SSA_NAME)
	continue;
      def = SSA_NAME_DEF_STMT (name);
      if (!is_gimple_assign (def) || gimple_assign_lhs (def) != name)
	continue;

      code = gimple_assign_rhs_code (def);
      if (code != SSA_NAME
	  && !IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
	  && (get_gimple_rhs_class (code) != GIMPLE_SINGLE_RHS
	      || !is_gimple_min_invariant (gimple_assign_rhs1 (def))))
	continue;

      /* We do not know whether the reference retains its value at the
	 place where the expansion is used.  */
      if (TREE_CODE_CLASS (code) == tcc_reference)
	continue;

      if (!*cache)
	*cache = new hash_map<tree, name_expansion *>;
      name_expansion **slot = &(*cache)->get_or_insert (e);
      exp = *slot;

      if (!exp)
	{
	  exp = XNEW (struct name_expansion);
	  exp->in_progress = 1;
	  *slot = exp;
	  /* In principle this is a generally valid folding, but
	     it is not unconditionally an optimization, so do it
	     here and not in fold_unary.  */
	  /* Convert (T1)(X *+- CST) into (T1)X *+- (T1)CST if T1 is wider
	     than the type of X and overflow for the type of X is
	     undefined.  */
	  if (e != name
	      && INTEGRAL_TYPE_P (type)
	      && INTEGRAL_TYPE_P (TREE_TYPE (name))
	      && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (name))
	      && TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (name))
	      && (code == PLUS_EXPR || code == MINUS_EXPR || code == MULT_EXPR)
	      && TREE_CODE (gimple_assign_rhs2 (def)) == INTEGER_CST)
	    rhs = fold_build2 (code, type,
			       fold_convert (type, gimple_assign_rhs1 (def)),
			       fold_convert (type, gimple_assign_rhs2 (def)));
	  else
	    {
	      rhs = gimple_assign_rhs_to_tree (def);
	      if (e != name)
		rhs = fold_convert (type, rhs);
	    }
	  tree_to_aff_combination_expand (rhs, comb->type, &current, cache);
	  exp->expansion = current;
	  exp->in_progress = 0;
	}
      else
	{
	  /* Since we follow the definitions in the SSA form, we should not
	     enter a cycle unless we pass through a phi node.  */
	  gcc_assert (!exp->in_progress);
	  current = exp->expansion;
	}

      /* Accumulate the new terms to TO_ADD, so that we do not modify
	 COMB while traversing it; include the term -coef * E, to remove
	 it from COMB.  */
      scale = comb->elts[i].coef;
      aff_combination_zero (&curre, comb->type);
      aff_combination_add_elt (&curre, e, -scale);
      aff_combination_scale (&current, scale);
      aff_combination_add (&to_add, &current);
      aff_combination_add (&to_add, &curre);
    }
  aff_combination_add (comb, &to_add);
}

/* Similar to tree_to_aff_combination, but follows SSA name definitions
   and expands them recursively.  CACHE is used to cache the expansions
   of the ssa names, to avoid exponential time complexity for cases
   like

   D.1 = a + a;
   D.2 = D.1 + D.1;
   D.3 = D.2 + D.2;
   ...  */

void
tree_to_aff_combination_expand (tree expr, tree type, aff_tree *comb,
				hash_map<tree, name_expansion *> **cache)
{
  tree_to_aff_combination (expr, type, comb);
  aff_combination_expand (comb, cache);
}

/* Frees memory occupied by struct name_expansion in *VALUE.  Callback for
   hash_map::traverse.  */

bool
free_name_expansion (tree const &, name_expansion **value, void *)
{
  free (*value);
  return true;
}

/* Frees memory allocated for the CACHE used by
   tree_to_aff_combination_expand.  */

void
free_affine_expand_cache (hash_map<tree, name_expansion *> **cache)
{
  if (!*cache)
    return;

  (*cache)->traverse<void *, free_name_expansion> (NULL);
  delete *cache;
  *cache = NULL;
}

/* If VAL != CST * DIV for any constant CST, returns false.
   Otherwise, if *MULT_SET is true, additionally compares CST and MULT,
   and if they are different, returns false.  Finally, if neither of these
   two cases occur, true is returned, and CST is stored to MULT and MULT_SET
   is set to true.  */

static bool
wide_int_constant_multiple_p (const widest_int &val, const widest_int &div,
			      bool *mult_set, widest_int *mult)
{
  widest_int cst;

  if (val == 0)
    {
      if (*mult_set && *mult != 0)
	return false;
      *mult_set = true;
      *mult = 0;
      return true;
    }

  if (div == 0)
    return false;

  if (!wi::multiple_of_p (val, div, SIGNED, &cst))
    return false;

  if (*mult_set && *mult != cst)
    return false;

  *mult_set = true;
  *mult = cst;
  return true;
}

/* Returns true if VAL = X * DIV for some constant X.  If this is the case,
   X is stored to MULT.  */

bool
aff_combination_constant_multiple_p (aff_tree *val, aff_tree *div,
				     widest_int *mult)
{
  bool mult_set = false;
  unsigned i;

  if (val->n == 0 && val->offset == 0)
    {
      *mult = 0;
      return true;
    }

  if (val->n != div->n)
    return false;

  if (val->rest || div->rest)
    return false;

  if (!wide_int_constant_multiple_p (val->offset, div->offset,
				     &mult_set, mult))
    return false;

  for (i = 0; i < div->n; i++)
    {
      struct aff_comb_elt *elt
	= aff_combination_find_elt (val, div->elts[i].val, NULL);
      if (!elt)
	return false;
      if (!wide_int_constant_multiple_p (elt->coef, div->elts[i].coef,
					 &mult_set, mult))
	return false;
    }

  gcc_assert (mult_set);
  return true;
}
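
/* For illustration (hypothetical, not from this file): for
   VAL = 8 + 4*x and DIV = 2 + 1*x, the offsets give the candidate
   multiplier 4, the coefficients of x are consistent with it, and
   *MULT is set to 4.  For VAL = 8 + 6*x the coefficient check fails,
   since 6 is not 4 * 1.  */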

/* Prints the affine VAL to the FILE.  */

static void
print_aff (FILE *file, aff_tree *val)
{
  unsigned i;
  signop sgn = TYPE_SIGN (val->type);
  if (POINTER_TYPE_P (val->type))
    sgn = SIGNED;
  fprintf (file, "{\n  type = ");
  print_generic_expr (file, val->type, TDF_VOPS|TDF_MEMSYMS);
  fprintf (file, "\n  offset = ");
  print_dec (val->offset, file, sgn);
  if (val->n > 0)
    {
      fprintf (file, "\n  elements = {\n");
      for (i = 0; i < val->n; i++)
	{
	  fprintf (file, "    [%d] = ", i);
	  print_generic_expr (file, val->elts[i].val, TDF_VOPS|TDF_MEMSYMS);

	  fprintf (file, " * ");
	  print_dec (val->elts[i].coef, file, sgn);
	  if (i != val->n - 1)
	    fprintf (file, ", \n");
	}
      fprintf (file, "\n  }");
    }
  if (val->rest)
    {
      fprintf (file, "\n  rest = ");
      print_generic_expr (file, val->rest, TDF_VOPS|TDF_MEMSYMS);
    }
  fprintf (file, "\n}");
}

/* Prints the affine VAL to the standard error, used for debugging.  */

DEBUG_FUNCTION void
debug_aff (aff_tree *val)
{
  print_aff (stderr, val);
  fprintf (stderr, "\n");
}

/* Computes address of the reference REF in ADDR.  The size of the accessed
   location is stored to SIZE.  Returns the ultimate containing object to
   which REF refers.  */

tree
get_inner_reference_aff (tree ref, aff_tree *addr, widest_int *size)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree toff;
  machine_mode mode;
  int uns, rev, vol;
  aff_tree tmp;
  tree base = get_inner_reference (ref, &bitsize, &bitpos, &toff, &mode,
				   &uns, &rev, &vol);
  tree base_addr = build_fold_addr_expr (base);

  /* ADDR = &BASE + TOFF + BITPOS / BITS_PER_UNIT.  */

  tree_to_aff_combination (base_addr, sizetype, addr);

  if (toff)
    {
      tree_to_aff_combination (toff, sizetype, &tmp);
      aff_combination_add (addr, &tmp);
    }

  aff_combination_const (&tmp, sizetype, bitpos / BITS_PER_UNIT);
  aff_combination_add (addr, &tmp);

  *size = (bitsize + BITS_PER_UNIT - 1) / BITS_PER_UNIT;

  return base;
}

/* Returns true if a region of size SIZE1 at position 0 and a region of
   size SIZE2 at position DIFF cannot overlap.  */

bool
aff_comb_cannot_overlap_p (aff_tree *diff, const widest_int &size1,
			   const widest_int &size2)
{
  /* Unless the difference is a constant, we fail.  */
  if (diff->n != 0)
    return false;

  if (wi::neg_p (diff->offset))
    {
      /* The second object is before the first one, we succeed if the last
	 element of the second object is before the start of the first one.  */
      return wi::neg_p (diff->offset + size2 - 1);
    }
  else
    {
      /* We succeed if the second object starts after the first one ends.  */
      return size1 <= diff->offset;
    }
}
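
/* For illustration (hypothetical values, not from this file): with
   SIZE1 = 4, SIZE2 = 4 and a constant DIFF->offset of 4, the regions
   [0, 4) and [4, 8) cannot overlap and the function returns true.
   With DIFF->offset = -2 the second region is [-2, 2); its last byte
   lies at offset 1, which is not before position 0, so the function
   returns false.  */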
;