/* Operations with affine combinations of trees.
   Copyright (C) 2005, 2007, 2008 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "output.h"
#include "diagnostic.h"
#include "tree-dump.h"
#include "pointer-set.h"
#include "tree-affine.h"
#include "gimple.h"
#include "flags.h"
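
/* An affine combination is represented by an aff_tree (declared in
   tree-affine.h): it stands for the value

     COMB->offset + sum (COMB->elts[i].coef * COMB->elts[i].val)
		  + COMB->rest,

   where the coefficients and the offset are double_ints and the values
   are trees.  At most MAX_AFF_ELTS elements are tracked explicitly;
   further terms are folded into the catch-all REST operand.  */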

/* Extends CST as appropriate for the affine combination COMB.  */

static double_int
double_int_ext_for_comb (double_int cst, aff_tree *comb)
{
  return double_int_sext (cst, TYPE_PRECISION (comb->type));
}
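
/* For example, if COMB->type has precision 32, the constant 0xffffffff is
   canonicalized to -1, so coefficient arithmetic wraps the same way it
   would in the combination's type.  */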

/* Initializes affine combination COMB so that its value is zero in TYPE.  */

static void
aff_combination_zero (aff_tree *comb, tree type)
{
  comb->type = type;
  comb->offset = double_int_zero;
  comb->n = 0;
  comb->rest = NULL_TREE;
}

/* Sets COMB to CST.  */

void
aff_combination_const (aff_tree *comb, tree type, double_int cst)
{
  aff_combination_zero (comb, type);

  comb->offset = double_int_ext_for_comb (cst, comb);
}

/* Sets COMB to single element ELT.  */

void
aff_combination_elt (aff_tree *comb, tree type, tree elt)
{
  aff_combination_zero (comb, type);

  comb->n = 1;
  comb->elts[0].val = elt;
  comb->elts[0].coef = double_int_one;
}

/* Scales COMB by SCALE.  */

void
aff_combination_scale (aff_tree *comb, double_int scale)
{
  unsigned i, j;

  scale = double_int_ext_for_comb (scale, comb);
  if (double_int_one_p (scale))
    return;

  if (double_int_zero_p (scale))
    {
      aff_combination_zero (comb, comb->type);
      return;
    }

  comb->offset
    = double_int_ext_for_comb (double_int_mul (scale, comb->offset), comb);
  for (i = 0, j = 0; i < comb->n; i++)
    {
      double_int new_coef;

      new_coef
	= double_int_ext_for_comb (double_int_mul (scale, comb->elts[i].coef),
				   comb);
      /* A coefficient may become zero due to overflow.  Remove the zero
	 elements.  */
      if (double_int_zero_p (new_coef))
	continue;
      comb->elts[j].coef = new_coef;
      comb->elts[j].val = comb->elts[i].val;
      j++;
    }
  comb->n = j;

  if (comb->rest)
    {
      tree type = comb->type;
      if (POINTER_TYPE_P (type))
	type = sizetype;
      if (comb->n < MAX_AFF_ELTS)
	{
	  comb->elts[comb->n].coef = scale;
	  comb->elts[comb->n].val = comb->rest;
	  comb->rest = NULL_TREE;
	  comb->n++;
	}
      else
	comb->rest = fold_build2 (MULT_EXPR, type, comb->rest,
				  double_int_to_tree (type, scale));
    }
}
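
/* For instance, scaling {offset = 2; elts = {3 * x}} by 4 yields
   {offset = 8; elts = {12 * x}}, with all arithmetic done in the
   precision of COMB->type.  */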

/* Adds ELT * SCALE to COMB.  */

void
aff_combination_add_elt (aff_tree *comb, tree elt, double_int scale)
{
  unsigned i;
  tree type;

  scale = double_int_ext_for_comb (scale, comb);
  if (double_int_zero_p (scale))
    return;

  for (i = 0; i < comb->n; i++)
    if (operand_equal_p (comb->elts[i].val, elt, 0))
      {
	double_int new_coef;

	new_coef = double_int_add (comb->elts[i].coef, scale);
	new_coef = double_int_ext_for_comb (new_coef, comb);
	if (!double_int_zero_p (new_coef))
	  {
	    comb->elts[i].coef = new_coef;
	    return;
	  }

	comb->n--;
	comb->elts[i] = comb->elts[comb->n];

	if (comb->rest)
	  {
	    gcc_assert (comb->n == MAX_AFF_ELTS - 1);
	    comb->elts[comb->n].coef = double_int_one;
	    comb->elts[comb->n].val = comb->rest;
	    comb->rest = NULL_TREE;
	    comb->n++;
	  }
	return;
      }
  if (comb->n < MAX_AFF_ELTS)
    {
      comb->elts[comb->n].coef = scale;
      comb->elts[comb->n].val = elt;
      comb->n++;
      return;
    }

  type = comb->type;
  if (POINTER_TYPE_P (type))
    type = sizetype;

  if (double_int_one_p (scale))
    elt = fold_convert (type, elt);
  else
    elt = fold_build2 (MULT_EXPR, type,
		       fold_convert (type, elt),
		       double_int_to_tree (type, scale));

  if (comb->rest)
    comb->rest = fold_build2 (PLUS_EXPR, type, comb->rest, elt);
  else
    comb->rest = elt;
}
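
/* For example, adding ELT = x with SCALE = 2 to a combination that already
   contains 3 * x simply updates the coefficient to 5 * x; only when ELT is
   not yet present and the element table is full does the new term spill
   into COMB->rest.  */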

/* Adds CST to C.  */

static void
aff_combination_add_cst (aff_tree *c, double_int cst)
{
  c->offset = double_int_ext_for_comb (double_int_add (c->offset, cst), c);
}

/* Adds COMB2 to COMB1.  */

void
aff_combination_add (aff_tree *comb1, aff_tree *comb2)
{
  unsigned i;

  aff_combination_add_cst (comb1, comb2->offset);
  for (i = 0; i < comb2->n; i++)
    aff_combination_add_elt (comb1, comb2->elts[i].val, comb2->elts[i].coef);
  if (comb2->rest)
    aff_combination_add_elt (comb1, comb2->rest, double_int_one);
}
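
/* E.g. {offset = 1; elts = {2 * x}} + {offset = 3; elts = {1 * x, 1 * y}}
   = {offset = 4; elts = {3 * x, 1 * y}}.  */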

/* Converts affine combination COMB to TYPE.  */

void
aff_combination_convert (aff_tree *comb, tree type)
{
  unsigned i, j;
  tree comb_type = comb->type;

  if (TYPE_PRECISION (type) > TYPE_PRECISION (comb_type))
    {
      tree val = fold_convert (type, aff_combination_to_tree (comb));
      tree_to_aff_combination (val, type, comb);
      return;
    }

  comb->type = type;
  if (comb->rest && !POINTER_TYPE_P (type))
    comb->rest = fold_convert (type, comb->rest);

  if (TYPE_PRECISION (type) == TYPE_PRECISION (comb_type))
    return;

  comb->offset = double_int_ext_for_comb (comb->offset, comb);
  for (i = j = 0; i < comb->n; i++)
    {
      double_int new_coef = double_int_ext_for_comb (comb->elts[i].coef, comb);
      if (double_int_zero_p (new_coef))
	continue;
      comb->elts[j].coef = new_coef;
      comb->elts[j].val = fold_convert (type, comb->elts[i].val);
      j++;
    }

  comb->n = j;
  if (comb->n < MAX_AFF_ELTS && comb->rest)
    {
      comb->elts[comb->n].coef = double_int_one;
      comb->elts[comb->n].val = comb->rest;
      comb->rest = NULL_TREE;
      comb->n++;
    }
}

/* Splits EXPR into an affine combination of parts.  */

void
tree_to_aff_combination (tree expr, tree type, aff_tree *comb)
{
  aff_tree tmp;
  enum tree_code code;
  tree cst, core, toffset;
  HOST_WIDE_INT bitpos, bitsize;
  enum machine_mode mode;
  int unsignedp, volatilep;

  STRIP_NOPS (expr);

  code = TREE_CODE (expr);
  switch (code)
    {
    case INTEGER_CST:
      aff_combination_const (comb, type, tree_to_double_int (expr));
      return;

    case POINTER_PLUS_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
      aff_combination_add (comb, &tmp);
      return;

    case PLUS_EXPR:
    case MINUS_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), type, &tmp);
      if (code == MINUS_EXPR)
	aff_combination_scale (&tmp, double_int_minus_one);
      aff_combination_add (comb, &tmp);
      return;

    case MULT_EXPR:
      cst = TREE_OPERAND (expr, 1);
      if (TREE_CODE (cst) != INTEGER_CST)
	break;
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, tree_to_double_int (cst));
      return;

    case NEGATE_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, double_int_minus_one);
      return;

    case BIT_NOT_EXPR:
      /* ~x = -x - 1  */
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, double_int_minus_one);
      aff_combination_add_cst (comb, double_int_minus_one);
      return;

    case ADDR_EXPR:
      core = get_inner_reference (TREE_OPERAND (expr, 0), &bitsize, &bitpos,
				  &toffset, &mode, &unsignedp, &volatilep,
				  false);
      if (bitpos % BITS_PER_UNIT != 0)
	break;
      aff_combination_const (comb, type,
			     uhwi_to_double_int (bitpos / BITS_PER_UNIT));
      core = build_fold_addr_expr (core);
      if (TREE_CODE (core) == ADDR_EXPR)
	aff_combination_add_elt (comb, core, double_int_one);
      else
	{
	  tree_to_aff_combination (core, type, &tmp);
	  aff_combination_add (comb, &tmp);
	}
      if (toffset)
	{
	  tree_to_aff_combination (toffset, type, &tmp);
	  aff_combination_add (comb, &tmp);
	}
      return;

    default:
      break;
    }

  aff_combination_elt (comb, type, expr);
}
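
/* As an illustration, EXPR = (x + 3) * 4 in TYPE becomes the combination
   {offset = 12; elts = {4 * x}}, while an expression this function cannot
   decompose further (say x / y) is recorded as a single element with
   coefficient one.  */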

/* Creates EXPR + ELT * SCALE in TYPE.  EXPR is taken from affine
   combination COMB.  */

static tree
add_elt_to_tree (tree expr, tree type, tree elt, double_int scale,
		 aff_tree *comb)
{
  enum tree_code code;
  tree type1 = type;
  if (POINTER_TYPE_P (type))
    type1 = sizetype;

  scale = double_int_ext_for_comb (scale, comb);
  elt = fold_convert (type1, elt);

  if (double_int_one_p (scale))
    {
      if (!expr)
	return fold_convert (type, elt);

      if (POINTER_TYPE_P (type))
	return fold_build2 (POINTER_PLUS_EXPR, type, expr, elt);
      return fold_build2 (PLUS_EXPR, type, expr, elt);
    }

  if (double_int_minus_one_p (scale))
    {
      if (!expr)
	return fold_convert (type, fold_build1 (NEGATE_EXPR, type1, elt));

      if (POINTER_TYPE_P (type))
	{
	  elt = fold_build1 (NEGATE_EXPR, type1, elt);
	  return fold_build2 (POINTER_PLUS_EXPR, type, expr, elt);
	}
      return fold_build2 (MINUS_EXPR, type, expr, elt);
    }

  if (!expr)
    return fold_convert (type,
			 fold_build2 (MULT_EXPR, type1, elt,
				      double_int_to_tree (type1, scale)));

  if (double_int_negative_p (scale))
    {
      code = MINUS_EXPR;
      scale = double_int_neg (scale);
    }
  else
    code = PLUS_EXPR;

  elt = fold_build2 (MULT_EXPR, type1, elt,
		     double_int_to_tree (type1, scale));
  if (POINTER_TYPE_P (type))
    {
      if (code == MINUS_EXPR)
	elt = fold_build1 (NEGATE_EXPR, type1, elt);
      return fold_build2 (POINTER_PLUS_EXPR, type, expr, elt);
    }
  return fold_build2 (code, type, expr, elt);
}
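
/* Note that when TYPE is a pointer type, ELT is computed in sizetype and
   attached with POINTER_PLUS_EXPR, since the offset operand of
   POINTER_PLUS_EXPR must be of sizetype.  */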

/* Makes tree from the affine combination COMB.  */

tree
aff_combination_to_tree (aff_tree *comb)
{
  tree type = comb->type;
  tree expr = comb->rest;
  unsigned i;
  double_int off, sgn;
  tree type1 = type;
  if (POINTER_TYPE_P (type))
    type1 = sizetype;

  gcc_assert (comb->n == MAX_AFF_ELTS || comb->rest == NULL_TREE);

  for (i = 0; i < comb->n; i++)
    expr = add_elt_to_tree (expr, type, comb->elts[i].val, comb->elts[i].coef,
			    comb);

  /* Ensure that we get x - 1, not x + (-1) or x + 0xff..f if x is
     unsigned.  */
  if (double_int_negative_p (comb->offset))
    {
      off = double_int_neg (comb->offset);
      sgn = double_int_minus_one;
    }
  else
    {
      off = comb->offset;
      sgn = double_int_one;
    }
  return add_elt_to_tree (expr, type, double_int_to_tree (type1, off), sgn,
			  comb);
}

/* Copies the tree elements of COMB to ensure that they are not shared.  */

void
unshare_aff_combination (aff_tree *comb)
{
  unsigned i;

  for (i = 0; i < comb->n; i++)
    comb->elts[i].val = unshare_expr (comb->elts[i].val);
  if (comb->rest)
    comb->rest = unshare_expr (comb->rest);
}

/* Remove M-th element from COMB.  */

void
aff_combination_remove_elt (aff_tree *comb, unsigned m)
{
  comb->n--;
  if (m <= comb->n)
    comb->elts[m] = comb->elts[comb->n];
  if (comb->rest)
    {
      comb->elts[comb->n].coef = double_int_one;
      comb->elts[comb->n].val = comb->rest;
      comb->rest = NULL_TREE;
      comb->n++;
    }
}

/* Adds C * COEF * VAL to R.  VAL may be NULL, in that case only
   C * COEF is added to R.  */

static void
aff_combination_add_product (aff_tree *c, double_int coef, tree val,
			     aff_tree *r)
{
  unsigned i;
  tree aval, type;

  for (i = 0; i < c->n; i++)
    {
      aval = c->elts[i].val;
      if (val)
	{
	  type = TREE_TYPE (aval);
	  aval = fold_build2 (MULT_EXPR, type, aval,
			      fold_convert (type, val));
	}

      aff_combination_add_elt (r, aval,
			       double_int_mul (coef, c->elts[i].coef));
    }

  if (c->rest)
    {
      aval = c->rest;
      if (val)
	{
	  type = TREE_TYPE (aval);
	  aval = fold_build2 (MULT_EXPR, type, aval,
			      fold_convert (type, val));
	}

      aff_combination_add_elt (r, aval, coef);
    }

  if (val)
    aff_combination_add_elt (r, val,
			     double_int_mul (coef, c->offset));
  else
    aff_combination_add_cst (r, double_int_mul (coef, c->offset));
}

/* Multiplies C1 by C2, storing the result to R.  */

void
aff_combination_mult (aff_tree *c1, aff_tree *c2, aff_tree *r)
{
  unsigned i;
  gcc_assert (TYPE_PRECISION (c1->type) == TYPE_PRECISION (c2->type));

  aff_combination_zero (r, c1->type);

  for (i = 0; i < c2->n; i++)
    aff_combination_add_product (c1, c2->elts[i].coef, c2->elts[i].val, r);
  if (c2->rest)
    aff_combination_add_product (c1, double_int_one, c2->rest, r);
  aff_combination_add_product (c1, c2->offset, NULL, r);
}
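
/* E.g. multiplying {offset = 1; elts = {1 * x}} by {offset = 2; elts = {1 * y}}
   gives {offset = 2; elts = {1 * (x * y), 1 * y, 2 * x}}: the product of two
   non-constant elements becomes a new element whose value is the folded
   MULT_EXPR.  */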

/* Returns the element of COMB whose value is VAL, or NULL if no such
   element exists.  If IDX is not NULL, it is set to the index of VAL in
   COMB.  */

static struct aff_comb_elt *
aff_combination_find_elt (aff_tree *comb, tree val, unsigned *idx)
{
  unsigned i;

  for (i = 0; i < comb->n; i++)
    if (operand_equal_p (comb->elts[i].val, val, 0))
      {
	if (idx)
	  *idx = i;

	return &comb->elts[i];
      }

  return NULL;
}

/* Element of the cache that maps ssa name NAME to its expanded form
   as an affine expression EXPANSION.  */

struct name_expansion
{
  aff_tree expansion;

  /* True if the expansion for the name is just being generated.  */
  unsigned in_progress : 1;
};

/* Expands SSA names in COMB recursively.  CACHE is used to cache the
   results.  */

void
aff_combination_expand (aff_tree *comb ATTRIBUTE_UNUSED,
			struct pointer_map_t **cache ATTRIBUTE_UNUSED)
{
  unsigned i;
  aff_tree to_add, current, curre;
  tree e, rhs;
  gimple def;
  double_int scale;
  void **slot;
  struct name_expansion *exp;

  aff_combination_zero (&to_add, comb->type);
  for (i = 0; i < comb->n; i++)
    {
      tree type, name;
      enum tree_code code;

      e = comb->elts[i].val;
      type = TREE_TYPE (e);
      name = e;
      /* Look through some conversions.  */
      if (TREE_CODE (e) == NOP_EXPR
	  && (TYPE_PRECISION (type)
	      >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (e, 0)))))
	name = TREE_OPERAND (e, 0);
      if (TREE_CODE (name) != SSA_NAME)
	continue;
      def = SSA_NAME_DEF_STMT (name);
      if (!is_gimple_assign (def) || gimple_assign_lhs (def) != name)
	continue;

      code = gimple_assign_rhs_code (def);
      if (code != SSA_NAME
	  && !IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
	  && (get_gimple_rhs_class (code) != GIMPLE_SINGLE_RHS
	      || !is_gimple_min_invariant (gimple_assign_rhs1 (def))))
	continue;

      /* We do not know whether the reference retains its value at the
	 place where the expansion is used.  */
      if (TREE_CODE_CLASS (code) == tcc_reference)
	continue;

      if (!*cache)
	*cache = pointer_map_create ();
      slot = pointer_map_insert (*cache, e);
      exp = (struct name_expansion *) *slot;

      if (!exp)
	{
	  exp = XNEW (struct name_expansion);
	  exp->in_progress = 1;
	  *slot = exp;
	  /* In principle this is a generally valid folding, but
	     it is not unconditionally an optimization, so do it
	     here and not in fold_unary.  */
	  /* Convert (T1)(X *+- CST) into (T1)X *+- (T1)CST if T1 is wider
	     than the type of X and overflow for the type of X is
	     undefined.  */
	  if (e != name
	      && INTEGRAL_TYPE_P (type)
	      && INTEGRAL_TYPE_P (TREE_TYPE (name))
	      && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (name))
	      && (code == PLUS_EXPR || code == MINUS_EXPR || code == MULT_EXPR)
	      && TREE_CODE (gimple_assign_rhs2 (def)) == INTEGER_CST)
	    rhs = fold_build2 (code, type,
			       fold_convert (type, gimple_assign_rhs1 (def)),
			       fold_convert (type, gimple_assign_rhs2 (def)));
	  else
	    {
	      rhs = gimple_assign_rhs_to_tree (def);
	      if (e != name)
		rhs = fold_convert (type, rhs);
	    }
	  tree_to_aff_combination_expand (rhs, comb->type, &current, cache);
	  exp->expansion = current;
	  exp->in_progress = 0;
	}
      else
	{
	  /* Since we follow the definitions in the SSA form, we should not
	     enter a cycle unless we pass through a phi node.  */
	  gcc_assert (!exp->in_progress);
	  current = exp->expansion;
	}

      /* Accumulate the new terms to TO_ADD, so that we do not modify
	 COMB while traversing it; include the term -coef * E, to remove
	 it from COMB.  */
      scale = comb->elts[i].coef;
      aff_combination_zero (&curre, comb->type);
      aff_combination_add_elt (&curre, e, double_int_neg (scale));
      aff_combination_scale (&current, scale);
      aff_combination_add (&to_add, &current);
      aff_combination_add (&to_add, &curre);
    }
  aff_combination_add (comb, &to_add);
}

/* Similar to tree_to_aff_combination, but follows SSA name definitions
   and expands them recursively.  CACHE is used to cache the expansions
   of the ssa names, to avoid exponential time complexity for cases
   like

   D.875_3 = a_1 + 3;
   D.876_4 = a_1 + D.875_3;
   D.877_5 = a_1 + D.876_4;
   D.878_6 = a_1 + D.877_5;
   ...  */

void
tree_to_aff_combination_expand (tree expr, tree type, aff_tree *comb,
				struct pointer_map_t **cache)
{
  tree_to_aff_combination (expr, type, comb);
  aff_combination_expand (comb, cache);
}

/* Frees memory occupied by struct name_expansion in *VALUE.  Callback for
   pointer_map_traverse.  */

static bool
free_name_expansion (const void *key ATTRIBUTE_UNUSED, void **value,
		     void *data ATTRIBUTE_UNUSED)
{
  struct name_expansion *const exp = (struct name_expansion *) *value;

  free (exp);
  return true;
}

/* Frees memory allocated for the CACHE used by
   tree_to_aff_combination_expand.  */

void
free_affine_expand_cache (struct pointer_map_t **cache)
{
  if (!*cache)
    return;

  pointer_map_traverse (*cache, free_name_expansion, NULL);
  pointer_map_destroy (*cache);
  *cache = NULL;
}
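
/* A typical usage pattern for the expansion cache (an illustrative sketch,
   not code from this file):

     struct pointer_map_t *cache = NULL;
     aff_tree a;

     tree_to_aff_combination_expand (expr, type, &a, &cache);
     ...more expansions sharing the same CACHE...
     free_affine_expand_cache (&cache);

   The cache starts out NULL and is allocated lazily by
   aff_combination_expand.  */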

/* If VAL != CST * DIV for any constant CST, returns false.
   Otherwise, if VAL != 0 (and hence CST != 0), and *MULT_SET is true,
   additionally compares CST and MULT, and if they are different,
   returns false.  Finally, if neither of these two cases occur,
   true is returned, and if CST != 0, CST is stored to MULT and
   MULT_SET is set to true.  */

static bool
double_int_constant_multiple_p (double_int val, double_int div,
				bool *mult_set, double_int *mult)
{
  double_int rem, cst;

  if (double_int_zero_p (val))
    return true;

  if (double_int_zero_p (div))
    return false;

  cst = double_int_sdivmod (val, div, FLOOR_DIV_EXPR, &rem);
  if (!double_int_zero_p (rem))
    return false;

  if (*mult_set && !double_int_equal_p (*mult, cst))
    return false;

  *mult_set = true;
  *mult = cst;
  return true;
}
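
/* For example, VAL = 12, DIV = 4 stores 3 in *MULT and sets *MULT_SET;
   a subsequent call with VAL = 8, DIV = 2 then returns false, because the
   new quotient 4 differs from the recorded one.  */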

/* Returns true if VAL = X * DIV for some constant X.  If this is the case,
   X is stored to MULT.  */

bool
aff_combination_constant_multiple_p (aff_tree *val, aff_tree *div,
				     double_int *mult)
{
  bool mult_set = false;
  unsigned i;

  if (val->n == 0 && double_int_zero_p (val->offset))
    {
      *mult = double_int_zero;
      return true;
    }
  if (val->n != div->n)
    return false;

  if (val->rest || div->rest)
    return false;

  if (!double_int_constant_multiple_p (val->offset, div->offset,
				       &mult_set, mult))
    return false;

  for (i = 0; i < div->n; i++)
    {
      struct aff_comb_elt *elt
	      = aff_combination_find_elt (val, div->elts[i].val, NULL);
      if (!elt)
	return false;
      if (!double_int_constant_multiple_p (elt->coef, div->elts[i].coef,
					   &mult_set, mult))
	return false;
    }

  gcc_assert (mult_set);
  return true;
}

/* Prints the affine VAL to the FILE. */

static void
print_aff (FILE *file, aff_tree *val)
{
  unsigned i;
  bool uns = TYPE_UNSIGNED (val->type);
  if (POINTER_TYPE_P (val->type))
    uns = false;
  fprintf (file, "{\n  type = ");
  print_generic_expr (file, val->type, TDF_VOPS|TDF_MEMSYMS);
  fprintf (file, "\n  offset = ");
  dump_double_int (file, val->offset, uns);
  if (val->n > 0)
    {
      fprintf (file, "\n  elements = {\n");
      for (i = 0; i < val->n; i++)
	{
	  fprintf (file, "    [%d] = ", i);
	  print_generic_expr (file, val->elts[i].val, TDF_VOPS|TDF_MEMSYMS);

	  fprintf (file, " * ");
	  dump_double_int (file, val->elts[i].coef, uns);
	  if (i != val->n - 1)
	    fprintf (file, ", \n");
	}
      fprintf (file, "\n  }");
    }
  if (val->rest)
    {
      fprintf (file, "\n  rest = ");
      print_generic_expr (file, val->rest, TDF_VOPS|TDF_MEMSYMS);
    }
  fprintf (file, "\n}");
}

/* Prints the affine VAL to the standard error, used for debugging.  */

void
debug_aff (aff_tree *val)
{
  print_aff (stderr, val);
  fprintf (stderr, "\n");
}

/* Returns address of the reference REF in ADDR.  The size of the accessed
   location is stored to SIZE.  */

void
get_inner_reference_aff (tree ref, aff_tree *addr, double_int *size)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree toff;
  enum machine_mode mode;
  int uns, vol;
  aff_tree tmp;
  tree base = get_inner_reference (ref, &bitsize, &bitpos, &toff, &mode,
				   &uns, &vol, false);
  tree base_addr = build_fold_addr_expr (base);

  /* ADDR = &BASE + TOFF + BITPOS / BITS_PER_UNIT.  */

  tree_to_aff_combination (base_addr, sizetype, addr);

  if (toff)
    {
      tree_to_aff_combination (toff, sizetype, &tmp);
      aff_combination_add (addr, &tmp);
    }

  aff_combination_const (&tmp, sizetype,
			 shwi_to_double_int (bitpos / BITS_PER_UNIT));
  aff_combination_add (addr, &tmp);

  *size = shwi_to_double_int ((bitsize + BITS_PER_UNIT - 1) / BITS_PER_UNIT);
}