/* Operations with affine combinations of trees.
   Copyright (C) 2005, 2007 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "output.h"
#include "diagnostic.h"
#include "tree-dump.h"
#include "pointer-set.h"
#include "tree-affine.h"
#include "tree-gimple.h"
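
/* An affine combination (aff_tree, declared in tree-affine.h) represents a
   value of the form

     offset + elts[0].coef * elts[0].val + ... + elts[n-1].coef * elts[n-1].val
            + rest

   where the offset and the coefficients are double_ints interpreted in the
   precision of the combination's type, at most MAX_AFF_ELTS elements are
   tracked explicitly, and whatever does not fit is accumulated in REST.
   An illustrative use (the variable names below are only placeholders) is
   computing the simplified difference of two address expressions:

     aff_tree a1, a2;
     tree diff;

     tree_to_aff_combination (addr1, sizetype, &a1);
     tree_to_aff_combination (addr2, sizetype, &a2);
     aff_combination_scale (&a1, double_int_minus_one);
     aff_combination_add (&a2, &a1);
     diff = aff_combination_to_tree (&a2);  */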

/* Extends CST as appropriate for the affine combination COMB.  */

static double_int
double_int_ext_for_comb (double_int cst, aff_tree *comb)
{
  return double_int_sext (cst, TYPE_PRECISION (comb->type));
}
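
/* E.g. in a combination whose type has precision 8, the constant 0xff is
   canonicalized to -1; all arithmetic on offsets and coefficients is thus
   performed modulo 2^precision with a sign-extended representative.  */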

/* Initializes affine combination COMB so that its value is zero in TYPE.  */

static void
aff_combination_zero (aff_tree *comb, tree type)
{
  comb->type = type;
  comb->offset = double_int_zero;
  comb->n = 0;
  comb->rest = NULL_TREE;
}

/* Sets COMB to CST.  */

static void
aff_combination_const (aff_tree *comb, tree type, double_int cst)
{
  aff_combination_zero (comb, type);
  comb->offset = double_int_ext_for_comb (cst, comb);
}

/* Sets COMB to single element ELT.  */

static void
aff_combination_elt (aff_tree *comb, tree type, tree elt)
{
  aff_combination_zero (comb, type);

  comb->n = 1;
  comb->elts[0].val = elt;
  comb->elts[0].coef = double_int_one;
}
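
/* For instance, after aff_combination_elt (&comb, type, x) the combination
   represents 0 + 1 * x, while aff_combination_const (&comb, type, cst)
   leaves it representing just CST.  */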

/* Scales COMB by SCALE.  */

void
aff_combination_scale (aff_tree *comb, double_int scale)
{
  unsigned i, j;

  scale = double_int_ext_for_comb (scale, comb);
  if (double_int_one_p (scale))
    return;

  if (double_int_zero_p (scale))
    {
      aff_combination_zero (comb, comb->type);
      return;
    }

  comb->offset
    = double_int_ext_for_comb (double_int_mul (scale, comb->offset), comb);
  for (i = 0, j = 0; i < comb->n; i++)
    {
      double_int new_coef;

      new_coef
        = double_int_ext_for_comb (double_int_mul (scale, comb->elts[i].coef),
                                   comb);
      /* A coefficient may become zero due to overflow.  Remove the zero
         elements.  */
      if (double_int_zero_p (new_coef))
        continue;
      comb->elts[j].coef = new_coef;
      comb->elts[j].val = comb->elts[i].val;
      j++;
    }
  comb->n = j;

  if (comb->rest)
    {
      tree type = comb->type;
      if (POINTER_TYPE_P (type))
        type = sizetype;
      if (comb->n < MAX_AFF_ELTS)
        {
          comb->elts[comb->n].coef = scale;
          comb->elts[comb->n].val = comb->rest;
          comb->rest = NULL_TREE;
          comb->n++;
        }
      else
        comb->rest = fold_build2 (MULT_EXPR, type, comb->rest,
                                  double_int_to_tree (type, scale));
    }
}
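
/* For example, scaling the combination 1 + 2 * x by 3 yields 3 + 6 * x.
   If all MAX_AFF_ELTS element slots are occupied and REST is non-NULL,
   the scale is instead applied to REST by building an explicit
   MULT_EXPR.  */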

/* Adds ELT * SCALE to COMB.  */

void
aff_combination_add_elt (aff_tree *comb, tree elt, double_int scale)
{
  unsigned i;
  tree type;

  scale = double_int_ext_for_comb (scale, comb);
  if (double_int_zero_p (scale))
    return;

  for (i = 0; i < comb->n; i++)
    if (operand_equal_p (comb->elts[i].val, elt, 0))
      {
        double_int new_coef;

        new_coef = double_int_add (comb->elts[i].coef, scale);
        new_coef = double_int_ext_for_comb (new_coef, comb);
        if (!double_int_zero_p (new_coef))
          {
            comb->elts[i].coef = new_coef;
            return;
          }

        comb->n--;
        comb->elts[i] = comb->elts[comb->n];

        if (comb->rest)
          {
            gcc_assert (comb->n == MAX_AFF_ELTS - 1);
            comb->elts[comb->n].coef = double_int_one;
            comb->elts[comb->n].val = comb->rest;
            comb->rest = NULL_TREE;
            comb->n++;
          }
        return;
      }
  if (comb->n < MAX_AFF_ELTS)
    {
      comb->elts[comb->n].coef = scale;
      comb->elts[comb->n].val = elt;
      comb->n++;
      return;
    }

  type = comb->type;
  if (POINTER_TYPE_P (type))
    type = sizetype;

  if (double_int_one_p (scale))
    elt = fold_convert (type, elt);
  else
    elt = fold_build2 (MULT_EXPR, type,
                       fold_convert (type, elt),
                       double_int_to_tree (type, scale));

  if (comb->rest)
    comb->rest = fold_build2 (PLUS_EXPR, type, comb->rest,
                              elt);
  else
    comb->rest = elt;
}
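
/* For example, adding the element x with scale 3 to the combination
   1 + 2 * x yields 1 + 5 * x, while adding it with scale -2 cancels the
   element completely.  If every element slot is taken and ELT is not among
   the existing elements, ELT * SCALE is accumulated into REST instead.  */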

/* Adds CST to C.  */

static void
aff_combination_add_cst (aff_tree *c, double_int cst)
{
  c->offset = double_int_ext_for_comb (double_int_add (c->offset, cst), c);
}

/* Adds COMB2 to COMB1.  */

void
aff_combination_add (aff_tree *comb1, aff_tree *comb2)
{
  unsigned i;

  aff_combination_add_cst (comb1, comb2->offset);
  for (i = 0; i < comb2->n; i++)
    aff_combination_add_elt (comb1, comb2->elts[i].val, comb2->elts[i].coef);
  if (comb2->rest)
    aff_combination_add_elt (comb1, comb2->rest, double_int_one);
}
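
/* E.g. adding 2 + 3 * x to 1 + x + 4 * y results in 3 + 4 * x + 4 * y.  */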

/* Converts affine combination COMB to TYPE.  */

void
aff_combination_convert (aff_tree *comb, tree type)
{
  unsigned i, j;
  tree comb_type = comb->type;

  if (TYPE_PRECISION (type) > TYPE_PRECISION (comb_type))
    {
      tree val = fold_convert (type, aff_combination_to_tree (comb));
      tree_to_aff_combination (val, type, comb);
      return;
    }

  comb->type = type;
  if (comb->rest && !POINTER_TYPE_P (type))
    comb->rest = fold_convert (type, comb->rest);

  if (TYPE_PRECISION (type) == TYPE_PRECISION (comb_type))
    return;

  comb->offset = double_int_ext_for_comb (comb->offset, comb);
  for (i = j = 0; i < comb->n; i++)
    {
      double_int new_coef = double_int_ext_for_comb (comb->elts[i].coef, comb);
      if (double_int_zero_p (new_coef))
        continue;
      comb->elts[j].coef = new_coef;
      comb->elts[j].val = fold_convert (type, comb->elts[i].val);
      j++;
    }

  comb->n = j;
  if (comb->n < MAX_AFF_ELTS && comb->rest)
    {
      comb->elts[comb->n].coef = double_int_one;
      comb->elts[comb->n].val = comb->rest;
      comb->rest = NULL_TREE;
      comb->n++;
    }
}
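
/* Conversions to an equal or narrower precision are done in place by
   re-extending the offset and the coefficients and converting the element
   values.  Widening conversions instead go through an explicit tree and are
   re-parsed, because the parts of the combination wrap at the precision of
   the old type and cannot simply be reinterpreted in the wider type.  */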

/* Splits EXPR into an affine combination of parts.  */

void
tree_to_aff_combination (tree expr, tree type, aff_tree *comb)
{
  aff_tree tmp;
  enum tree_code code;
  tree cst, core, toffset;
  HOST_WIDE_INT bitpos, bitsize;
  enum machine_mode mode;
  int unsignedp, volatilep;

  STRIP_NOPS (expr);

  code = TREE_CODE (expr);
  switch (code)
    {
    case INTEGER_CST:
      aff_combination_const (comb, type, tree_to_double_int (expr));
      return;

    case POINTER_PLUS_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
      aff_combination_convert (&tmp, type);
      aff_combination_add (comb, &tmp);
      return;

    case PLUS_EXPR:
    case MINUS_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), type, &tmp);
      if (code == MINUS_EXPR)
        aff_combination_scale (&tmp, double_int_minus_one);
      aff_combination_add (comb, &tmp);
      return;

    case MULT_EXPR:
      cst = TREE_OPERAND (expr, 1);
      if (TREE_CODE (cst) != INTEGER_CST)
        break;
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, tree_to_double_int (cst));
      return;

    case NEGATE_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, double_int_minus_one);
      return;

    case BIT_NOT_EXPR:
      /* ~x = -x - 1 */
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, double_int_minus_one);
      aff_combination_add_cst (comb, double_int_minus_one);
      return;

    case ADDR_EXPR:
      core = get_inner_reference (TREE_OPERAND (expr, 0), &bitsize, &bitpos,
                                  &toffset, &mode, &unsignedp, &volatilep,
                                  false);
      if (bitpos % BITS_PER_UNIT != 0)
        break;
      aff_combination_const (comb, type,
                             uhwi_to_double_int (bitpos / BITS_PER_UNIT));
      core = build_fold_addr_expr (core);
      if (TREE_CODE (core) == ADDR_EXPR)
        aff_combination_add_elt (comb, core, double_int_one);
      else
        {
          tree_to_aff_combination (core, type, &tmp);
          aff_combination_add (comb, &tmp);
        }
      if (toffset)
        {
          tree_to_aff_combination (toffset, type, &tmp);
          aff_combination_add (comb, &tmp);
        }
      return;

    default:
      break;
    }

  aff_combination_elt (comb, type, expr);
}
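
/* For example, the expression (x + 3) * 4 - y is decomposed into the
   combination 12 + 4 * x + (-1) * y.  Taking the address of a memory
   reference is handled through get_inner_reference, so that the result is
   the address of the base plus the decomposed offset.  Anything that cannot
   be decomposed further becomes a single element with coefficient one.  */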

/* Creates EXPR + ELT * SCALE in TYPE.  EXPR is taken from affine
   combination COMB.  */

static tree
add_elt_to_tree (tree expr, tree type, tree elt, double_int scale,
                 aff_tree *comb)
{
  enum tree_code code;

  scale = double_int_ext_for_comb (scale, comb);
  elt = fold_convert (type, elt);

  if (double_int_one_p (scale))
    {
      if (!expr)
        return elt;

      return fold_build2 (PLUS_EXPR, type, expr, elt);
    }

  if (double_int_minus_one_p (scale))
    {
      if (!expr)
        return fold_build1 (NEGATE_EXPR, type, elt);

      return fold_build2 (MINUS_EXPR, type, expr, elt);
    }

  if (!expr)
    return fold_build2 (MULT_EXPR, type, elt,
                        double_int_to_tree (type, scale));

  if (double_int_negative_p (scale))
    {
      code = MINUS_EXPR;
      scale = double_int_neg (scale);
    }
  else
    code = PLUS_EXPR;

  elt = fold_build2 (MULT_EXPR, type, elt,
                     double_int_to_tree (type, scale));
  return fold_build2 (code, type, expr, elt);
}

/* Makes tree from the affine combination COMB.  */

tree
aff_combination_to_tree (aff_tree *comb)
{
  tree type = comb->type;
  tree expr = comb->rest;
  unsigned i;
  double_int off, sgn;

  gcc_assert (comb->n == MAX_AFF_ELTS || comb->rest == NULL_TREE);

  for (i = 0; i < comb->n; i++)
    expr = add_elt_to_tree (expr, type, comb->elts[i].val, comb->elts[i].coef,
                            comb);

  /* Ensure that we get x - 1, not x + (-1) or x + 0xff..f if x is
     unsigned.  */
  if (double_int_negative_p (comb->offset))
    {
      off = double_int_neg (comb->offset);
      sgn = double_int_minus_one;
    }
  else
    {
      off = comb->offset;
      sgn = double_int_one;
    }
  return add_elt_to_tree (expr, type, double_int_to_tree (type, off), sgn,
                          comb);
}
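
/* Converting a tree to an affine combination and back therefore acts as a
   simplifier for affine expressions; e.g. (x + 3) - (x + 1) folds to 2.  */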

/* Copies the tree elements of COMB to ensure that they are not shared.  */

void
unshare_aff_combination (aff_tree *comb)
{
  unsigned i;

  for (i = 0; i < comb->n; i++)
    comb->elts[i].val = unshare_expr (comb->elts[i].val);
  if (comb->rest)
    comb->rest = unshare_expr (comb->rest);
}

/* Remove M-th element from COMB.  */

void
aff_combination_remove_elt (aff_tree *comb, unsigned m)
{
  comb->n--;
  if (m <= comb->n)
    comb->elts[m] = comb->elts[comb->n];
  if (comb->rest)
    {
      comb->elts[comb->n].coef = double_int_one;
      comb->elts[comb->n].val = comb->rest;
      comb->rest = NULL_TREE;
      comb->n++;
    }
}

/* Adds C * COEF * VAL to R.  VAL may be NULL, in that case only
   C * COEF is added to R.  */

static void
aff_combination_add_product (aff_tree *c, double_int coef, tree val,
                             aff_tree *r)
{
  unsigned i;
  tree aval, type;

  for (i = 0; i < c->n; i++)
    {
      aval = c->elts[i].val;
      if (val)
        {
          type = TREE_TYPE (aval);
          aval = fold_build2 (MULT_EXPR, type, aval,
                              fold_convert (type, val));
        }

      aff_combination_add_elt (r, aval,
                               double_int_mul (coef, c->elts[i].coef));
    }

  if (c->rest)
    {
      aval = c->rest;
      if (val)
        {
          type = TREE_TYPE (aval);
          aval = fold_build2 (MULT_EXPR, type, aval,
                              fold_convert (type, val));
        }

      aff_combination_add_elt (r, aval, coef);
    }

  if (val)
    aff_combination_add_elt (r, val,
                             double_int_mul (coef, c->offset));
  else
    aff_combination_add_cst (r, double_int_mul (coef, c->offset));
}

/* Multiplies C1 by C2, storing the result to R.  */

void
aff_combination_mult (aff_tree *c1, aff_tree *c2, aff_tree *r)
{
  unsigned i;

  gcc_assert (TYPE_PRECISION (c1->type) == TYPE_PRECISION (c2->type));

  aff_combination_zero (r, c1->type);

  for (i = 0; i < c2->n; i++)
    aff_combination_add_product (c1, c2->elts[i].coef, c2->elts[i].val, r);
  if (c2->rest)
    aff_combination_add_product (c1, double_int_one, c2->rest, r);
  aff_combination_add_product (c1, c2->offset, NULL, r);
}
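
/* For example, multiplying 1 + 2 * x by 3 + y gives
   3 + 6 * x + 1 * y + 2 * (x * y); the non-affine product x * y becomes an
   ordinary element (or ends up in REST once the element slots run out).  */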

/* Returns the element of COMB whose value is VAL, or NULL if no such
   element exists.  If IDX is not NULL, it is set to the index of VAL in
   COMB.  */

static struct aff_comb_elt *
aff_combination_find_elt (aff_tree *comb, tree val, unsigned *idx)
{
  unsigned i;

  for (i = 0; i < comb->n; i++)
    if (operand_equal_p (comb->elts[i].val, val, 0))
      {
        if (idx)
          *idx = i;

        return &comb->elts[i];
      }

  return NULL;
}

/* Element of the cache that maps ssa name NAME to its expanded form
   as an affine expression EXPANSION.  */

struct name_expansion
{
  aff_tree expansion;

  /* True if the expansion for the name is just being generated.  */
  unsigned in_progress : 1;
};

/* Similar to tree_to_aff_combination, but follows SSA name definitions
   and expands them recursively.  CACHE is used to cache the expansions
   of the ssa names, to avoid exponential time complexity for cases
   like

   a1 = a0 + a0;
   a2 = a1 + a1;
   a3 = a2 + a2;
   ...  */

void
tree_to_aff_combination_expand (tree expr, tree type, aff_tree *comb,
                                struct pointer_map_t **cache)
{
  unsigned i;
  aff_tree to_add, current, curre;
  tree e, def, rhs;
  double_int scale;
  void **slot;
  struct name_expansion *exp;

  tree_to_aff_combination (expr, type, comb);
  aff_combination_zero (&to_add, type);
  for (i = 0; i < comb->n; i++)
    {
      e = comb->elts[i].val;
      if (TREE_CODE (e) != SSA_NAME)
        continue;
      def = SSA_NAME_DEF_STMT (e);
      if (TREE_CODE (def) != GIMPLE_MODIFY_STMT
          || GIMPLE_STMT_OPERAND (def, 0) != e)
        continue;

      rhs = GIMPLE_STMT_OPERAND (def, 1);
      if (TREE_CODE (rhs) != SSA_NAME
          && !EXPR_P (rhs)
          && !is_gimple_min_invariant (rhs))
        continue;

      /* We do not know whether the reference retains its value at the
         place where the expansion is used.  */
      if (REFERENCE_CLASS_P (rhs))
        continue;

      if (!*cache)
        *cache = pointer_map_create ();
      slot = pointer_map_insert (*cache, e);
      exp = *slot;

      if (!exp)
        {
          exp = XNEW (struct name_expansion);
          exp->in_progress = 1;
          *slot = exp;
          tree_to_aff_combination_expand (rhs, type, &current, cache);
          exp->expansion = current;
          exp->in_progress = 0;
        }
      else
        {
          /* Since we follow the definitions in the SSA form, we should not
             enter a cycle unless we pass through a phi node.  */
          gcc_assert (!exp->in_progress);
          current = exp->expansion;
        }

      /* Accumulate the new terms to TO_ADD, so that we do not modify
         COMB while traversing it; include the term -coef * E, to remove
         it from COMB.  */
      scale = comb->elts[i].coef;
      aff_combination_zero (&curre, type);
      aff_combination_add_elt (&curre, e, double_int_neg (scale));
      aff_combination_scale (&current, scale);
      aff_combination_add (&to_add, &current);
      aff_combination_add (&to_add, &curre);
    }
  aff_combination_add (comb, &to_add);
}
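
/* For instance, given the SSA definitions a1 = a0 + a0 and a2 = a1 + a1,
   expanding a2 + 1 yields 1 + 4 * a0.  The cached expansions keep chains
   like a1 = a0 + a0; a2 = a1 + a1; a3 = a2 + a2; ... linear to process
   rather than exponential.  */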

/* Frees memory occupied by struct name_expansion in *VALUE.  Callback for
   pointer_map_traverse.  */

static bool
free_name_expansion (const void *key ATTRIBUTE_UNUSED, void **value,
                     void *data ATTRIBUTE_UNUSED)
{
  struct name_expansion *exp = *value;

  free (exp);
  return true;
}

/* Frees memory allocated for the CACHE used by
   tree_to_aff_combination_expand.  */

void
free_affine_expand_cache (struct pointer_map_t **cache)
{
  if (!*cache)
    return;

  pointer_map_traverse (*cache, free_name_expansion, NULL);
  pointer_map_destroy (*cache);
  *cache = NULL;
}

/* If VAL != CST * DIV for any constant CST, returns false.
   Otherwise, if VAL != 0 (and hence CST != 0), and *MULT_SET is true,
   additionally compares CST and MULT, and if they are different,
   returns false.  Finally, if neither of these two cases occur,
   true is returned, and if CST != 0, CST is stored to MULT and
   MULT_SET is set to true.  */

static bool
double_int_constant_multiple_p (double_int val, double_int div,
                                bool *mult_set, double_int *mult)
{
  double_int rem, cst;

  if (double_int_zero_p (val))
    return true;

  if (double_int_zero_p (div))
    return false;

  cst = double_int_sdivmod (val, div, FLOOR_DIV_EXPR, &rem);
  if (!double_int_zero_p (rem))
    return false;

  if (*mult_set && !double_int_equal_p (*mult, cst))
    return false;

  *mult_set = true;
  *mult = cst;
  return true;
}

/* Returns true if VAL = X * DIV for some constant X.  If this is the case,
   X is stored to MULT.  */

bool
aff_combination_constant_multiple_p (aff_tree *val, aff_tree *div,
                                     double_int *mult)
{
  bool mult_set = false;
  unsigned i;

  if (val->n == 0 && double_int_zero_p (val->offset))
    {
      *mult = double_int_zero;
      return true;
    }
  if (val->n != div->n)
    return false;

  if (val->rest || div->rest)
    return false;

  if (!double_int_constant_multiple_p (val->offset, div->offset,
                                       &mult_set, mult))
    return false;

  for (i = 0; i < div->n; i++)
    {
      struct aff_comb_elt *elt
        = aff_combination_find_elt (val, div->elts[i].val, NULL);
      if (!elt)
        return false;
      if (!double_int_constant_multiple_p (elt->coef, div->elts[i].coef,
                                           &mult_set, mult))
        return false;
    }

  gcc_assert (mult_set);
  return true;
}
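
/* For example, with VAL = 8 + 4 * x and DIV = 2 + 1 * x, MULT is set to 4
   and true is returned; with VAL = 8 + 6 * x the factors for the offset and
   for x disagree, so false is returned.  */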

/* Prints the affine VAL to the FILE.  */

static void
print_aff (FILE *file, aff_tree *val)
{
  unsigned i;
  bool uns = TYPE_UNSIGNED (val->type);
  if (POINTER_TYPE_P (val->type))
    uns = false;
  fprintf (file, "{\n  type = ");
  print_generic_expr (file, val->type, TDF_VOPS|TDF_MEMSYMS);
  fprintf (file, "\n  offset = ");
  dump_double_int (file, val->offset, uns);
  if (val->n > 0)
    {
      fprintf (file, "\n  elements = {\n");
      for (i = 0; i < val->n; i++)
        {
          fprintf (file, "    [%d] = ", i);
          print_generic_expr (file, val->elts[i].val, TDF_VOPS|TDF_MEMSYMS);

          fprintf (file, " * ");
          dump_double_int (file, val->elts[i].coef, uns);
          if (i != val->n - 1)
            fprintf (file, ", \n");
        }
      fprintf (file, "\n  }");
    }
  if (val->rest)
    {
      fprintf (file, "\n  rest = ");
      print_generic_expr (file, val->rest, TDF_VOPS|TDF_MEMSYMS);
    }
  fprintf (file, "\n}");
}

/* Prints the affine VAL to the standard error, used for debugging.  */

void
debug_aff (aff_tree *val)
{
  print_aff (stderr, val);
  fprintf (stderr, "\n");
}