/* Straight-line strength reduction.
   Copyright (C) 2012-2013 Free Software Foundation, Inc.
   Contributed by Bill Schmidt, IBM <wschmidt@linux.ibm.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* There are many algorithms for performing strength reduction on
   loops.  This is not one of them.  IVOPTS handles strength reduction
   of induction variables just fine.  This pass is intended to pick
   up the crumbs it leaves behind, by considering opportunities for
   strength reduction along dominator paths.

   Strength reduction addresses explicit multiplies, and certain
   multiplies implicit in addressing expressions.  It would also be
   possible to apply strength reduction to divisions and modulos,
   but such opportunities are relatively uncommon.

   Strength reduction is also currently restricted to integer operations.
   If desired, it could be extended to floating-point operations under
   control of something like -funsafe-math-optimizations.  */
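
/* Illustrative sketch only, not part of the pass: straight-line code of
   the following shape (hypothetical names) is the kind of opportunity
   described above, since there is no loop for IVOPTS to handle.  */
#if 0
extern void use (int);

void
slsr_example (int x, int stride)
{
  int a = (x + 1) * stride;	/* Basis candidate:  (B + 1) * S.  */
  int b = (x + 3) * stride;	/* May become b = a + 2 * stride.  */
  use (a);
  use (b);
}
#endif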
38 #include "coretypes.h"
41 #include "basic-block.h"
42 #include "tree-pass.h"
44 #include "gimple-pretty-print.h"
47 #include "pointer-set.h"
50 #include "hash-table.h"
/* Information about a strength reduction candidate.  Each statement
   in the candidate table represents an expression of one of the
   following forms (the special case of CAND_REF will be described
   later):

   (CAND_MULT)  S1:  X = (B + i) * S
   (CAND_ADD)   S1:  X = B + (i * S)

   Here X and B are SSA names, i is an integer constant, and S is
   either an SSA name or a constant.  We call B the "base," i the
   "index", and S the "stride."

   Any statement S0 that dominates S1 and is of the form:

   (CAND_MULT)  S0:  Y = (B + i') * S
   (CAND_ADD)   S0:  Y = B + (i' * S)

   is called a "basis" for S1.  In both cases, S1 may be replaced by

     S1': X = Y + (i - i') * S,

   where (i - i') * S is folded to the extent possible.
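
   As a concrete (purely illustrative) instance, with i = 7, i' = 2,
   and stride s_1:

     S0: Y = (b_1 + 2) * s_1
     S1: X = (b_1 + 7) * s_1

   S1 may be replaced by

     S1': X = Y + 5 * s_1

   since (7 - 2) * s_1 folds to 5 * s_1.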
   All gimple statements are visited in dominator order, and each
   statement that may contribute to one of the forms of S1 above is
   given at least one entry in the candidate table.  Such statements
   include addition, pointer addition, subtraction, multiplication,
   negation, copies, and nontrivial type casts.  If a statement may
   represent more than one expression of the forms of S1 above,
   multiple "interpretations" are stored in the table and chained
   together.  Examples:

   * An add of two SSA names may treat either operand as the base.
   * A multiply of two SSA names, likewise.
   * A copy or cast may be thought of as either a CAND_MULT with
     i = 0 and S = 1, or as a CAND_ADD with i = 0 or S = 0.

   Candidate records are allocated from an obstack.  They are addressed
   both from a hash table keyed on S1, and from a vector of candidate
   pointers arranged in predominator order.
   Opportunity note
   ----------------
   Currently we don't recognize:

     S0: Y = (S * i') - B
     S1: X = (S * i) - B

   as a strength reduction opportunity, even though this S1 would
   also be replaceable by the S1' above.  This can be added if it
   comes up in practice.
   Strength reduction in addressing
   --------------------------------
   There is another kind of candidate known as CAND_REF.  A CAND_REF
   describes a statement containing a memory reference having
   complex addressing that might benefit from strength reduction.
   Specifically, we are interested in references for which
   get_inner_reference returns a base address, offset, and bitpos as
   follows:

     base:    MEM_REF (T1, C1)
     offset:  MULT_EXPR (PLUS_EXPR (T2, C2), C3)
     bitpos:  C4 * BITS_PER_UNIT

   Here T1 and T2 are arbitrary trees, and C1, C2, C3, C4 are
   arbitrary integer constants.  Note that C2 may be zero, in which
   case the offset will be MULT_EXPR (T2, C3).

   When this pattern is recognized, the original memory reference
   can be replaced with:

     MEM_REF (POINTER_PLUS_EXPR (T1, MULT_EXPR (T2, C3)),
              C1 + (C2 * C3) + C4)

   which distributes the multiply to allow constant folding.  When
   two or more addressing expressions can be represented by MEM_REFs
   of this form, differing only in the constants C1, C2, and C4,
   making this substitution produces more efficient addressing during
   the RTL phases.  When there are not at least two expressions with
   the same values of T1, T2, and C3, there is nothing to be gained
   by the replacement.

   Strength reduction of CAND_REFs uses the same infrastructure as
   that used by CAND_MULTs and CAND_ADDs.  We record T1 in the base (B)
   field, MULT_EXPR (T2, C3) in the stride (S) field, and
   C1 + (C2 * C3) + C4 in the index (i) field.  A basis for a CAND_REF
   is thus another CAND_REF with the same B and S values.  When at
   least two CAND_REFs are chained together using the basis relation,
   each of them is replaced as above, resulting in improved code
   generation for addressing.
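
   As a purely illustrative instance, two accesses a[i+1].f and a[i+2].f,
   where the element size is 16 and the offset of field f is 4, can be
   represented as

     MEM_REF (POINTER_PLUS_EXPR (a, MULT_EXPR (i, 16)), 20)
     MEM_REF (POINTER_PLUS_EXPR (a, MULT_EXPR (i, 16)), 36)

   so that the variable part of the addressing is shared and the two
   references differ only in the constant displacement.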
   Conditional candidates
   ======================

   Conditional candidates are best illustrated with an example.
   Consider the code sequence:

   (1)  x_0 = ...;
   (2)  a_0 = x_0 * 5;          MULT (B: x_0; i: 0; S: 5)
        if (...)
   (3)    x_1 = x_0 + 1;        ADD  (B: x_0; i: 1; S: 1)
   (4)  x_2 = PHI <x_0, x_1>;   PHI  (B: x_0; i: 0; S: 1)
   (5)  x_3 = x_2 + 1;          ADD  (B: x_2; i: 1; S: 1)
   (6)  a_1 = x_3 * 5;          MULT (B: x_2; i: 1; S: 5)

   Here strength reduction is complicated by the uncertain value of x_2.
   A legitimate transformation is:

   (1)  x_0 = ...;
   (2)  a_0 = x_0 * 5;
        if (...)
          {
   (3)      [x_1 = x_0 + 1;]
   (3a)     t_1 = a_0 + 5;
          }
   (4)  [x_2 = PHI <x_0, x_1>;]
   (4a) t_2 = PHI <a_0, t_1>;
   (5)  [x_3 = x_2 + 1;]
   (6r) a_1 = t_2 + 5;

   where the bracketed instructions may go dead.

   To recognize this opportunity, we have to observe that statement (6)
   has a "hidden basis" (2).  The hidden basis is unlike a normal basis
   in that the statement and the hidden basis have different base SSA
   names (x_2 and x_0, respectively).  The relationship is established
   when a statement's base name (x_2) is defined by a phi statement (4),
   each argument of which (x_0, x_1) has an identical "derived base name."
   If the argument is defined by a candidate (as x_1 is by (3)) that is a
   CAND_ADD having a stride of 1, the derived base name of the argument is
   the base name of the candidate (x_0).  Otherwise, the argument itself
   is its derived base name (as is the case with argument x_0).

   The hidden basis for statement (6) is the nearest dominating candidate
   whose base name is the derived base name (x_0) of the feeding phi (4),
   and whose stride is identical to that of the statement.  We can then
   create the new "phi basis" (4a) and feeding adds along incoming arcs (3a),
   allowing the final replacement of (6) by the strength-reduced (6r).

   To facilitate this, a new kind of candidate (CAND_PHI) is introduced.
   A CAND_PHI is not a candidate for replacement, but is maintained in the
   candidate table to ease discovery of hidden bases.  Any phi statement
   whose arguments share a common derived base name is entered into the
   table with the derived base name, an (arbitrary) index of zero, and a
   stride of 1.  A statement with a hidden basis can then be detected by
   simply looking up its feeding phi definition in the candidate table,
   extracting the derived base name, and searching for a basis in the
   usual manner after substituting the derived base name.

   Note that the transformation is only valid when the original phi and
   the statements that define the phi's arguments are all at the same
   position in the loop hierarchy.  */
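
/* Illustrative sketch only, not part of the pass: at the source level the
   conditional-candidate example above corresponds roughly to code of this
   shape (hypothetical names).  */
#if 0
extern void use (int);

void
slsr_cond_example (int x, int p)
{
  use (x * 5);			/* (2) a_0 = x_0 * 5  */
  if (p)
    x = x + 1;			/* (3) x_1 = x_0 + 1  */
  use ((x + 1) * 5);		/* (5)/(6) x_3 = x_2 + 1; a_1 = x_3 * 5  */
}
#endif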
/* Index into the candidate vector, offset by 1.  VECs are zero-based,
   while cand_idx's are one-based, with zero indicating null.  */
typedef unsigned cand_idx;
/* The kind of candidate.  */
enum cand_kind
{
  CAND_MULT,
  CAND_ADD,
  CAND_REF,
  CAND_PHI
};

struct slsr_cand_d
{
  /* The candidate statement S1.  */
  gimple cand_stmt;

  /* The base expression B:  often an SSA name, but not always.  */
  tree base_expr;

  /* The stride S.  */
  tree stride;

  /* The index constant i.  */
  double_int index;

  /* The type of the candidate.  This is normally the type of base_expr,
     but casts may have occurred when combining feeding instructions.
     A candidate can only be a basis for candidates of the same final type.
     (For CAND_REFs, this is the type to be used for operand 1 of the
     replacement MEM_REF.)  */
  tree cand_type;

  /* The kind of candidate (CAND_MULT, etc.).  */
  enum cand_kind kind;

  /* Index of this candidate in the candidate vector.  */
  cand_idx cand_num;

  /* Index of the next candidate record for the same statement.
     A statement may be useful in more than one way (e.g., due to
     commutativity).  So we can have multiple "interpretations"
     of a statement.  */
  cand_idx next_interp;

  /* Index of the basis statement S0, if any, in the candidate vector.  */
  cand_idx basis;

  /* First candidate for which this candidate is a basis, if one exists.  */
  cand_idx dependent;

  /* Next candidate having the same basis as this one.  */
  cand_idx sibling;

  /* If this is a conditional candidate, the CAND_PHI candidate
     that defines the base SSA name B.  */
  cand_idx def_phi;

  /* Savings that can be expected from eliminating dead code if this
     candidate is replaced.  */
  int dead_savings;
};

typedef struct slsr_cand_d slsr_cand, *slsr_cand_t;
typedef const struct slsr_cand_d *const_slsr_cand_t;
/* Pointers to candidates are chained together as part of a mapping
   from base expressions to the candidates that use them.  */

struct cand_chain_d
{
  /* Base expression for the chain of candidates:  often, but not
     always, an SSA name.  */
  tree base_expr;

  /* Pointer to a candidate.  */
  slsr_cand_t cand;

  /* Chain pointer.  */
  struct cand_chain_d *next;
};

typedef struct cand_chain_d cand_chain, *cand_chain_t;
typedef const struct cand_chain_d *const_cand_chain_t;
/* Information about a unique "increment" associated with candidates
   having an SSA name for a stride.  An increment is the difference
   between the index of the candidate and the index of its basis,
   i.e., (i - i') as discussed in the module commentary.

   When we are not going to generate address arithmetic we treat
   increments that differ only in sign as the same, allowing sharing
   of the cost of initializers.  The absolute value of the increment
   is stored in the incr_info.  */

struct incr_info_d
{
  /* The increment that relates a candidate to its basis.  */
  double_int incr;

  /* How many times the increment occurs in the candidate tree.  */
  unsigned count;

  /* Cost of replacing candidates using this increment.  Negative and
     zero costs indicate replacement should be performed.  */
  int cost;

  /* If this increment is profitable but is not -1, 0, or 1, it requires
     an initializer T_0 = stride * incr to be found or introduced in the
     nearest common dominator of all candidates.  This field holds T_0
     for subsequent use.  */
  tree initializer;

  /* If the initializer was found to already exist, this is the block
     where it was found.  */
  basic_block init_bb;
};

typedef struct incr_info_d incr_info, *incr_info_t;
/* Candidates are maintained in a vector.  If candidate X dominates
   candidate Y, then X appears before Y in the vector; but the
   converse does not necessarily hold.  */
static vec<slsr_cand_t> cand_vec;
346 enum phi_adjust_status
352 enum count_phis_status
/* Pointer map embodying a mapping from statements to candidates.  */
static struct pointer_map_t *stmt_cand_map;

/* Obstack for candidates.  */
static struct obstack cand_obstack;

/* Obstack for candidate chains.  */
static struct obstack chain_obstack;

/* An array INCR_VEC of incr_infos is used during analysis of related
   candidates having an SSA name for a stride.  INCR_VEC_LEN describes
   its current length.  MAX_INCR_VEC_LEN is used to avoid costly
   pathological cases.  */
static incr_info_t incr_vec;
static unsigned incr_vec_len;
const int MAX_INCR_VEC_LEN = 16;

/* For a chain of candidates with unknown stride, indicates whether or not
   we must generate pointer arithmetic when replacing statements.  */
static bool address_arithmetic_p;

/* Forward function declarations.  */
static slsr_cand_t base_cand_from_table (tree);
static tree introduce_cast_before_cand (slsr_cand_t, tree, tree);
/* Produce a pointer to the IDX'th candidate in the candidate vector.  */

static slsr_cand_t
lookup_cand (cand_idx idx)
{
  return cand_vec[idx - 1];
}
391 /* Helper for hashing a candidate chain header. */
393 struct cand_chain_hasher
: typed_noop_remove
<cand_chain
>
395 typedef cand_chain value_type
;
396 typedef cand_chain compare_type
;
397 static inline hashval_t
hash (const value_type
*);
398 static inline bool equal (const value_type
*, const compare_type
*);
402 cand_chain_hasher::hash (const value_type
*p
)
404 tree base_expr
= p
->base_expr
;
405 return iterative_hash_expr (base_expr
, 0);
409 cand_chain_hasher::equal (const value_type
*chain1
, const compare_type
*chain2
)
411 return operand_equal_p (chain1
->base_expr
, chain2
->base_expr
, 0);
414 /* Hash table embodying a mapping from base exprs to chains of candidates. */
415 static hash_table
<cand_chain_hasher
> base_cand_map
;
417 /* Look in the candidate table for a CAND_PHI that defines BASE and
418 return it if found; otherwise return NULL. */
421 find_phi_def (tree base
)
425 if (TREE_CODE (base
) != SSA_NAME
)
428 c
= base_cand_from_table (base
);
430 if (!c
|| c
->kind
!= CAND_PHI
)
436 /* Helper routine for find_basis_for_candidate. May be called twice:
437 once for the candidate's base expr, and optionally again for the
438 candidate's phi definition. */
441 find_basis_for_base_expr (slsr_cand_t c
, tree base_expr
)
443 cand_chain mapping_key
;
445 slsr_cand_t basis
= NULL
;
447 // Limit potential of N^2 behavior for long candidate chains.
449 int max_iters
= PARAM_VALUE (PARAM_MAX_SLSR_CANDIDATE_SCAN
);
451 mapping_key
.base_expr
= base_expr
;
452 chain
= base_cand_map
.find (&mapping_key
);
454 for (; chain
&& iters
< max_iters
; chain
= chain
->next
, ++iters
)
456 slsr_cand_t one_basis
= chain
->cand
;
458 if (one_basis
->kind
!= c
->kind
459 || one_basis
->cand_stmt
== c
->cand_stmt
460 || !operand_equal_p (one_basis
->stride
, c
->stride
, 0)
461 || !types_compatible_p (one_basis
->cand_type
, c
->cand_type
)
462 || !dominated_by_p (CDI_DOMINATORS
,
463 gimple_bb (c
->cand_stmt
),
464 gimple_bb (one_basis
->cand_stmt
)))
467 if (!basis
|| basis
->cand_num
< one_basis
->cand_num
)
474 /* Use the base expr from candidate C to look for possible candidates
475 that can serve as a basis for C. Each potential basis must also
476 appear in a block that dominates the candidate statement and have
477 the same stride and type. If more than one possible basis exists,
478 the one with highest index in the vector is chosen; this will be
479 the most immediately dominating basis. */
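/* For example (illustrative): if candidates 3 and 9 both have C's kind,
   stride, and type, and both dominate C's statement, candidate 9 is
   chosen, since a higher candidate number means a later position in
   dominator order and hence a more immediately dominating basis.  */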
482 find_basis_for_candidate (slsr_cand_t c
)
484 slsr_cand_t basis
= find_basis_for_base_expr (c
, c
->base_expr
);
486 /* If a candidate doesn't have a basis using its base expression,
487 it may have a basis hidden by one or more intervening phis. */
488 if (!basis
&& c
->def_phi
)
490 basic_block basis_bb
, phi_bb
;
491 slsr_cand_t phi_cand
= lookup_cand (c
->def_phi
);
492 basis
= find_basis_for_base_expr (c
, phi_cand
->base_expr
);
496 /* A hidden basis must dominate the phi-definition of the
497 candidate's base name. */
498 phi_bb
= gimple_bb (phi_cand
->cand_stmt
);
499 basis_bb
= gimple_bb (basis
->cand_stmt
);
501 if (phi_bb
== basis_bb
502 || !dominated_by_p (CDI_DOMINATORS
, phi_bb
, basis_bb
))
508 /* If we found a hidden basis, estimate additional dead-code
509 savings if the phi and its feeding statements can be removed. */
510 if (basis
&& has_single_use (gimple_phi_result (phi_cand
->cand_stmt
)))
511 c
->dead_savings
+= phi_cand
->dead_savings
;
517 c
->sibling
= basis
->dependent
;
518 basis
->dependent
= c
->cand_num
;
519 return basis
->cand_num
;
525 /* Record a mapping from the base expression of C to C itself, indicating that
526 C may potentially serve as a basis using that base expression. */
529 record_potential_basis (slsr_cand_t c
)
534 node
= (cand_chain_t
) obstack_alloc (&chain_obstack
, sizeof (cand_chain
));
535 node
->base_expr
= c
->base_expr
;
538 slot
= base_cand_map
.find_slot (node
, INSERT
);
542 cand_chain_t head
= (cand_chain_t
) (*slot
);
543 node
->next
= head
->next
;
550 /* Allocate storage for a new candidate and initialize its fields.
551 Attempt to find a basis for the candidate. */
554 alloc_cand_and_find_basis (enum cand_kind kind
, gimple gs
, tree base
,
555 double_int index
, tree stride
, tree ctype
,
558 slsr_cand_t c
= (slsr_cand_t
) obstack_alloc (&cand_obstack
,
564 c
->cand_type
= ctype
;
566 c
->cand_num
= cand_vec
.length () + 1;
570 c
->def_phi
= kind
== CAND_MULT
? find_phi_def (base
) : 0;
571 c
->dead_savings
= savings
;
573 cand_vec
.safe_push (c
);
575 if (kind
== CAND_PHI
)
578 c
->basis
= find_basis_for_candidate (c
);
580 record_potential_basis (c
);
/* Determine the target cost of statement GS when compiling according
   to SPEED.  */
589 stmt_cost (gimple gs
, bool speed
)
591 tree lhs
, rhs1
, rhs2
;
592 enum machine_mode lhs_mode
;
594 gcc_assert (is_gimple_assign (gs
));
595 lhs
= gimple_assign_lhs (gs
);
596 rhs1
= gimple_assign_rhs1 (gs
);
597 lhs_mode
= TYPE_MODE (TREE_TYPE (lhs
));
599 switch (gimple_assign_rhs_code (gs
))
602 rhs2
= gimple_assign_rhs2 (gs
);
604 if (host_integerp (rhs2
, 0))
605 return mult_by_coeff_cost (TREE_INT_CST_LOW (rhs2
), lhs_mode
, speed
);
607 gcc_assert (TREE_CODE (rhs1
) != INTEGER_CST
);
608 return mul_cost (speed
, lhs_mode
);
611 case POINTER_PLUS_EXPR
:
613 return add_cost (speed
, lhs_mode
);
616 return neg_cost (speed
, lhs_mode
);
619 return convert_cost (lhs_mode
, TYPE_MODE (TREE_TYPE (rhs1
)), speed
);
      /* Note that we don't assign costs to copies that in most cases
	 will go away.  */
631 /* Look up the defining statement for BASE_IN and return a pointer
632 to its candidate in the candidate table, if any; otherwise NULL.
633 Only CAND_ADD and CAND_MULT candidates are returned. */
636 base_cand_from_table (tree base_in
)
640 gimple def
= SSA_NAME_DEF_STMT (base_in
);
642 return (slsr_cand_t
) NULL
;
644 result
= (slsr_cand_t
*) pointer_map_contains (stmt_cand_map
, def
);
646 if (result
&& (*result
)->kind
!= CAND_REF
)
649 return (slsr_cand_t
) NULL
;
652 /* Add an entry to the statement-to-candidate mapping. */
655 add_cand_for_stmt (gimple gs
, slsr_cand_t c
)
657 void **slot
= pointer_map_insert (stmt_cand_map
, gs
);
662 /* Given PHI which contains a phi statement, determine whether it
663 satisfies all the requirements of a phi candidate. If so, create
664 a candidate. Note that a CAND_PHI never has a basis itself, but
665 is used to help find a basis for subsequent candidates. */
668 slsr_process_phi (gimple phi
, bool speed
)
671 tree arg0_base
= NULL_TREE
, base_type
;
673 struct loop
*cand_loop
= gimple_bb (phi
)->loop_father
;
674 unsigned savings
= 0;
676 /* A CAND_PHI requires each of its arguments to have the same
677 derived base name. (See the module header commentary for a
678 definition of derived base names.) Furthermore, all feeding
     definitions must be in the same position in the loop hierarchy
     as the phi itself.  */
682 for (i
= 0; i
< gimple_phi_num_args (phi
); i
++)
684 slsr_cand_t arg_cand
;
685 tree arg
= gimple_phi_arg_def (phi
, i
);
686 tree derived_base_name
= NULL_TREE
;
687 gimple arg_stmt
= NULL
;
688 basic_block arg_bb
= NULL
;
690 if (TREE_CODE (arg
) != SSA_NAME
)
693 arg_cand
= base_cand_from_table (arg
);
697 while (arg_cand
->kind
!= CAND_ADD
&& arg_cand
->kind
!= CAND_PHI
)
699 if (!arg_cand
->next_interp
)
702 arg_cand
= lookup_cand (arg_cand
->next_interp
);
705 if (!integer_onep (arg_cand
->stride
))
708 derived_base_name
= arg_cand
->base_expr
;
709 arg_stmt
= arg_cand
->cand_stmt
;
710 arg_bb
= gimple_bb (arg_stmt
);
712 /* Gather potential dead code savings if the phi statement
713 can be removed later on. */
714 if (has_single_use (arg
))
716 if (gimple_code (arg_stmt
) == GIMPLE_PHI
)
717 savings
+= arg_cand
->dead_savings
;
719 savings
+= stmt_cost (arg_stmt
, speed
);
724 derived_base_name
= arg
;
726 if (SSA_NAME_IS_DEFAULT_DEF (arg
))
727 arg_bb
= single_succ (ENTRY_BLOCK_PTR
);
729 gimple_bb (SSA_NAME_DEF_STMT (arg
));
732 if (!arg_bb
|| arg_bb
->loop_father
!= cand_loop
)
736 arg0_base
= derived_base_name
;
737 else if (!operand_equal_p (derived_base_name
, arg0_base
, 0))
  /* Create the candidate.  "alloc_cand_and_find_basis" is named
     misleadingly for this case, as no basis will be sought for a
     CAND_PHI.  */
744 base_type
= TREE_TYPE (arg0_base
);
746 c
= alloc_cand_and_find_basis (CAND_PHI
, phi
, arg0_base
, double_int_zero
,
747 integer_one_node
, base_type
, savings
);
749 /* Add the candidate to the statement-candidate mapping. */
750 add_cand_for_stmt (phi
, c
);
/* Given PBASE which is a pointer to tree, look up the defining
   statement for it and check whether the candidate is in the
   form of:

     X = B + (1 * S), S is integer constant
     X = B + (i * S), S is integer one

   If so, set PBASE to the candidate's base_expr and return double
   int (i * S).  Otherwise, just return double int zero.  */
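/* For example (illustrative): if *PBASE is defined by a candidate of the
   form T_5 = B_3 + (1 * 8), this function sets *PBASE to B_3 and returns
   the double int 8, allowing the constant to be folded into the index of
   the enclosing reference.  */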
765 backtrace_base_for_ref (tree
*pbase
)
767 tree base_in
= *pbase
;
768 slsr_cand_t base_cand
;
770 STRIP_NOPS (base_in
);
771 if (TREE_CODE (base_in
) != SSA_NAME
)
772 return tree_to_double_int (integer_zero_node
);
774 base_cand
= base_cand_from_table (base_in
);
776 while (base_cand
&& base_cand
->kind
!= CAND_PHI
)
778 if (base_cand
->kind
== CAND_ADD
779 && base_cand
->index
.is_one ()
780 && TREE_CODE (base_cand
->stride
) == INTEGER_CST
)
782 /* X = B + (1 * S), S is integer constant. */
783 *pbase
= base_cand
->base_expr
;
784 return tree_to_double_int (base_cand
->stride
);
786 else if (base_cand
->kind
== CAND_ADD
787 && TREE_CODE (base_cand
->stride
) == INTEGER_CST
788 && integer_onep (base_cand
->stride
))
790 /* X = B + (i * S), S is integer one. */
791 *pbase
= base_cand
->base_expr
;
792 return base_cand
->index
;
795 if (base_cand
->next_interp
)
796 base_cand
= lookup_cand (base_cand
->next_interp
);
801 return tree_to_double_int (integer_zero_node
);
/* Look for the following pattern:

    *PBASE:    MEM_REF (T1, C1)

    *POFFSET:  MULT_EXPR (T2, C3)  [C2 is zero]
               or
               MULT_EXPR (PLUS_EXPR (T2, C2), C3)
               or
               MULT_EXPR (MINUS_EXPR (T2, -C2), C3)

    *PINDEX:   C4 * BITS_PER_UNIT

   If not present, leave the input values unchanged and return FALSE.
   Otherwise, modify the input values as follows and return TRUE:

    *PBASE:    T1
    *POFFSET:  MULT_EXPR (T2, C3)
    *PINDEX:   C1 + (C2 * C3) + C4

   When T2 is recorded by a CAND_ADD in the form of (T2' + C5), it
   will be further restructured to:

    *PBASE:    T1
    *POFFSET:  MULT_EXPR (T2', C3)
    *PINDEX:   C1 + (C2 * C3) + C4 + (C5 * C3)  */
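/* Worked example (illustrative): for an access a[i + 3].f with element
   size C3 = 16, field offset C4 = 4, and C1 = 0, get_inner_reference
   yields

     base:    MEM_REF (a, 0)
     offset:  MULT_EXPR (PLUS_EXPR (i, 3), 16)
     bitpos:  4 * BITS_PER_UNIT

   and this function rewrites the triple to *PBASE = a,
   *POFFSET = MULT_EXPR (i, 16), *PINDEX = 0 + 3 * 16 + 4 = 52.  */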
831 restructure_reference (tree
*pbase
, tree
*poffset
, double_int
*pindex
,
834 tree base
= *pbase
, offset
= *poffset
;
835 double_int index
= *pindex
;
836 double_int bpu
= double_int::from_uhwi (BITS_PER_UNIT
);
837 tree mult_op0
, mult_op1
, t1
, t2
, type
;
838 double_int c1
, c2
, c3
, c4
, c5
;
842 || TREE_CODE (base
) != MEM_REF
843 || TREE_CODE (offset
) != MULT_EXPR
844 || TREE_CODE (TREE_OPERAND (offset
, 1)) != INTEGER_CST
845 || !index
.umod (bpu
, FLOOR_MOD_EXPR
).is_zero ())
848 t1
= TREE_OPERAND (base
, 0);
849 c1
= mem_ref_offset (base
);
850 type
= TREE_TYPE (TREE_OPERAND (base
, 1));
852 mult_op0
= TREE_OPERAND (offset
, 0);
853 mult_op1
= TREE_OPERAND (offset
, 1);
855 c3
= tree_to_double_int (mult_op1
);
857 if (TREE_CODE (mult_op0
) == PLUS_EXPR
)
859 if (TREE_CODE (TREE_OPERAND (mult_op0
, 1)) == INTEGER_CST
)
861 t2
= TREE_OPERAND (mult_op0
, 0);
862 c2
= tree_to_double_int (TREE_OPERAND (mult_op0
, 1));
867 else if (TREE_CODE (mult_op0
) == MINUS_EXPR
)
869 if (TREE_CODE (TREE_OPERAND (mult_op0
, 1)) == INTEGER_CST
)
871 t2
= TREE_OPERAND (mult_op0
, 0);
872 c2
= -tree_to_double_int (TREE_OPERAND (mult_op0
, 1));
880 c2
= double_int_zero
;
883 c4
= index
.udiv (bpu
, FLOOR_DIV_EXPR
);
884 c5
= backtrace_base_for_ref (&t2
);
887 *poffset
= fold_build2 (MULT_EXPR
, sizetype
, fold_convert (sizetype
, t2
),
888 double_int_to_tree (sizetype
, c3
));
889 *pindex
= c1
+ c2
* c3
+ c4
+ c5
* c3
;
895 /* Given GS which contains a data reference, create a CAND_REF entry in
896 the candidate table and attempt to find a basis. */
899 slsr_process_ref (gimple gs
)
901 tree ref_expr
, base
, offset
, type
;
902 HOST_WIDE_INT bitsize
, bitpos
;
903 enum machine_mode mode
;
904 int unsignedp
, volatilep
;
908 if (gimple_vdef (gs
))
909 ref_expr
= gimple_assign_lhs (gs
);
911 ref_expr
= gimple_assign_rhs1 (gs
);
913 if (!handled_component_p (ref_expr
)
914 || TREE_CODE (ref_expr
) == BIT_FIELD_REF
915 || (TREE_CODE (ref_expr
) == COMPONENT_REF
916 && DECL_BIT_FIELD (TREE_OPERAND (ref_expr
, 1))))
919 base
= get_inner_reference (ref_expr
, &bitsize
, &bitpos
, &offset
, &mode
,
920 &unsignedp
, &volatilep
, false);
921 index
= double_int::from_uhwi (bitpos
);
923 if (!restructure_reference (&base
, &offset
, &index
, &type
))
926 c
= alloc_cand_and_find_basis (CAND_REF
, gs
, base
, index
, offset
,
929 /* Add the candidate to the statement-candidate mapping. */
930 add_cand_for_stmt (gs
, c
);
/* Create a candidate entry for a statement GS, where GS multiplies
   two SSA names BASE_IN and STRIDE_IN.  Propagate any known information
   about the two SSA names into the new candidate.  Return the new
   candidate.  */
939 create_mul_ssa_cand (gimple gs
, tree base_in
, tree stride_in
, bool speed
)
941 tree base
= NULL_TREE
, stride
= NULL_TREE
, ctype
= NULL_TREE
;
943 unsigned savings
= 0;
945 slsr_cand_t base_cand
= base_cand_from_table (base_in
);
947 /* Look at all interpretations of the base candidate, if necessary,
948 to find information to propagate into this candidate. */
949 while (base_cand
&& !base
&& base_cand
->kind
!= CAND_PHI
)
952 if (base_cand
->kind
== CAND_MULT
&& integer_onep (base_cand
->stride
))
958 base
= base_cand
->base_expr
;
959 index
= base_cand
->index
;
961 ctype
= base_cand
->cand_type
;
962 if (has_single_use (base_in
))
963 savings
= (base_cand
->dead_savings
964 + stmt_cost (base_cand
->cand_stmt
, speed
));
966 else if (base_cand
->kind
== CAND_ADD
967 && TREE_CODE (base_cand
->stride
) == INTEGER_CST
)
	  /* Y = B + (i' * S), S constant
	     X = Y * Z
	     ============================
	     X = B + ((i' * S) * Z)  */
973 base
= base_cand
->base_expr
;
974 index
= base_cand
->index
* tree_to_double_int (base_cand
->stride
);
976 ctype
= base_cand
->cand_type
;
977 if (has_single_use (base_in
))
978 savings
= (base_cand
->dead_savings
979 + stmt_cost (base_cand
->cand_stmt
, speed
));
982 if (base_cand
->next_interp
)
983 base_cand
= lookup_cand (base_cand
->next_interp
);
990 /* No interpretations had anything useful to propagate, so
991 produce X = (Y + 0) * Z. */
993 index
= double_int_zero
;
995 ctype
= TREE_TYPE (base_in
);
998 c
= alloc_cand_and_find_basis (CAND_MULT
, gs
, base
, index
, stride
,
/* Create a candidate entry for a statement GS, where GS multiplies
   SSA name BASE_IN by constant STRIDE_IN.  Propagate any known
   information about BASE_IN into the new candidate.  Return the new
   candidate.  */
1009 create_mul_imm_cand (gimple gs
, tree base_in
, tree stride_in
, bool speed
)
1011 tree base
= NULL_TREE
, stride
= NULL_TREE
, ctype
= NULL_TREE
;
1012 double_int index
, temp
;
1013 unsigned savings
= 0;
1015 slsr_cand_t base_cand
= base_cand_from_table (base_in
);
1017 /* Look at all interpretations of the base candidate, if necessary,
1018 to find information to propagate into this candidate. */
1019 while (base_cand
&& !base
&& base_cand
->kind
!= CAND_PHI
)
1021 if (base_cand
->kind
== CAND_MULT
1022 && TREE_CODE (base_cand
->stride
) == INTEGER_CST
)
	  /* Y = (B + i') * S, S constant
	     X = Y * c
	     ============================
	     X = (B + i') * (S * c)  */
1028 base
= base_cand
->base_expr
;
1029 index
= base_cand
->index
;
1030 temp
= tree_to_double_int (base_cand
->stride
)
1031 * tree_to_double_int (stride_in
);
1032 stride
= double_int_to_tree (TREE_TYPE (stride_in
), temp
);
1033 ctype
= base_cand
->cand_type
;
1034 if (has_single_use (base_in
))
1035 savings
= (base_cand
->dead_savings
1036 + stmt_cost (base_cand
->cand_stmt
, speed
));
1038 else if (base_cand
->kind
== CAND_ADD
&& integer_onep (base_cand
->stride
))
	  /* Y = B + (i' * 1)
	     X = Y * c
	     ===========================
	     X = (B + i') * c  */
1044 base
= base_cand
->base_expr
;
1045 index
= base_cand
->index
;
1047 ctype
= base_cand
->cand_type
;
1048 if (has_single_use (base_in
))
1049 savings
= (base_cand
->dead_savings
1050 + stmt_cost (base_cand
->cand_stmt
, speed
));
1052 else if (base_cand
->kind
== CAND_ADD
1053 && base_cand
->index
.is_one ()
1054 && TREE_CODE (base_cand
->stride
) == INTEGER_CST
)
1056 /* Y = B + (1 * S), S constant
1058 ===========================
1060 base
= base_cand
->base_expr
;
1061 index
= tree_to_double_int (base_cand
->stride
);
1063 ctype
= base_cand
->cand_type
;
1064 if (has_single_use (base_in
))
1065 savings
= (base_cand
->dead_savings
1066 + stmt_cost (base_cand
->cand_stmt
, speed
));
1069 if (base_cand
->next_interp
)
1070 base_cand
= lookup_cand (base_cand
->next_interp
);
1077 /* No interpretations had anything useful to propagate, so
1078 produce X = (Y + 0) * c. */
1080 index
= double_int_zero
;
1082 ctype
= TREE_TYPE (base_in
);
1085 c
= alloc_cand_and_find_basis (CAND_MULT
, gs
, base
, index
, stride
,
/* Given GS which is a multiply of scalar integers, make an appropriate
   entry in the candidate table.  If this is a multiply of two SSA names,
   create two CAND_MULT interpretations and attempt to find a basis for
   each of them.  Otherwise, create a single CAND_MULT and attempt to
   find a basis for it.  */
1097 slsr_process_mul (gimple gs
, tree rhs1
, tree rhs2
, bool speed
)
1101 /* If this is a multiply of an SSA name with itself, it is highly
1102 unlikely that we will get a strength reduction opportunity, so
1103 don't record it as a candidate. This simplifies the logic for
1104 finding a basis, so if this is removed that must be considered. */
1108 if (TREE_CODE (rhs2
) == SSA_NAME
)
1110 /* Record an interpretation of this statement in the candidate table
1111 assuming RHS1 is the base expression and RHS2 is the stride. */
1112 c
= create_mul_ssa_cand (gs
, rhs1
, rhs2
, speed
);
1114 /* Add the first interpretation to the statement-candidate mapping. */
1115 add_cand_for_stmt (gs
, c
);
1117 /* Record another interpretation of this statement assuming RHS1
1118 is the stride and RHS2 is the base expression. */
1119 c2
= create_mul_ssa_cand (gs
, rhs2
, rhs1
, speed
);
1120 c
->next_interp
= c2
->cand_num
;
1124 /* Record an interpretation for the multiply-immediate. */
1125 c
= create_mul_imm_cand (gs
, rhs1
, rhs2
, speed
);
1127 /* Add the interpretation to the statement-candidate mapping. */
1128 add_cand_for_stmt (gs
, c
);
1132 /* Create a candidate entry for a statement GS, where GS adds two
1133 SSA names BASE_IN and ADDEND_IN if SUBTRACT_P is false, and
1134 subtracts ADDEND_IN from BASE_IN otherwise. Propagate any known
1135 information about the two SSA names into the new candidate.
1136 Return the new candidate. */
1139 create_add_ssa_cand (gimple gs
, tree base_in
, tree addend_in
,
1140 bool subtract_p
, bool speed
)
1142 tree base
= NULL_TREE
, stride
= NULL_TREE
, ctype
= NULL
;
1144 unsigned savings
= 0;
1146 slsr_cand_t base_cand
= base_cand_from_table (base_in
);
1147 slsr_cand_t addend_cand
= base_cand_from_table (addend_in
);
1149 /* The most useful transformation is a multiply-immediate feeding
1150 an add or subtract. Look for that first. */
1151 while (addend_cand
&& !base
&& addend_cand
->kind
!= CAND_PHI
)
1153 if (addend_cand
->kind
== CAND_MULT
1154 && addend_cand
->index
.is_zero ()
1155 && TREE_CODE (addend_cand
->stride
) == INTEGER_CST
)
1157 /* Z = (B + 0) * S, S constant
1159 ===========================
1160 X = Y + ((+/-1 * S) * B) */
1162 index
= tree_to_double_int (addend_cand
->stride
);
1165 stride
= addend_cand
->base_expr
;
1166 ctype
= TREE_TYPE (base_in
);
1167 if (has_single_use (addend_in
))
1168 savings
= (addend_cand
->dead_savings
1169 + stmt_cost (addend_cand
->cand_stmt
, speed
));
1172 if (addend_cand
->next_interp
)
1173 addend_cand
= lookup_cand (addend_cand
->next_interp
);
1178 while (base_cand
&& !base
&& base_cand
->kind
!= CAND_PHI
)
1180 if (base_cand
->kind
== CAND_ADD
1181 && (base_cand
->index
.is_zero ()
1182 || operand_equal_p (base_cand
->stride
,
1183 integer_zero_node
, 0)))
1185 /* Y = B + (i' * S), i' * S = 0
1187 ============================
1188 X = B + (+/-1 * Z) */
1189 base
= base_cand
->base_expr
;
1190 index
= subtract_p
? double_int_minus_one
: double_int_one
;
1192 ctype
= base_cand
->cand_type
;
1193 if (has_single_use (base_in
))
1194 savings
= (base_cand
->dead_savings
1195 + stmt_cost (base_cand
->cand_stmt
, speed
));
1197 else if (subtract_p
)
1199 slsr_cand_t subtrahend_cand
= base_cand_from_table (addend_in
);
1201 while (subtrahend_cand
&& !base
&& subtrahend_cand
->kind
!= CAND_PHI
)
1203 if (subtrahend_cand
->kind
== CAND_MULT
1204 && subtrahend_cand
->index
.is_zero ()
1205 && TREE_CODE (subtrahend_cand
->stride
) == INTEGER_CST
)
1207 /* Z = (B + 0) * S, S constant
1209 ===========================
1210 Value: X = Y + ((-1 * S) * B) */
1212 index
= tree_to_double_int (subtrahend_cand
->stride
);
1214 stride
= subtrahend_cand
->base_expr
;
1215 ctype
= TREE_TYPE (base_in
);
1216 if (has_single_use (addend_in
))
1217 savings
= (subtrahend_cand
->dead_savings
1218 + stmt_cost (subtrahend_cand
->cand_stmt
, speed
));
1221 if (subtrahend_cand
->next_interp
)
1222 subtrahend_cand
= lookup_cand (subtrahend_cand
->next_interp
);
1224 subtrahend_cand
= NULL
;
1228 if (base_cand
->next_interp
)
1229 base_cand
= lookup_cand (base_cand
->next_interp
);
1236 /* No interpretations had anything useful to propagate, so
1237 produce X = Y + (1 * Z). */
1239 index
= subtract_p
? double_int_minus_one
: double_int_one
;
1241 ctype
= TREE_TYPE (base_in
);
1244 c
= alloc_cand_and_find_basis (CAND_ADD
, gs
, base
, index
, stride
,
1249 /* Create a candidate entry for a statement GS, where GS adds SSA
1250 name BASE_IN to constant INDEX_IN. Propagate any known information
1251 about BASE_IN into the new candidate. Return the new candidate. */
1254 create_add_imm_cand (gimple gs
, tree base_in
, double_int index_in
, bool speed
)
1256 enum cand_kind kind
= CAND_ADD
;
1257 tree base
= NULL_TREE
, stride
= NULL_TREE
, ctype
= NULL_TREE
;
1258 double_int index
, multiple
;
1259 unsigned savings
= 0;
1261 slsr_cand_t base_cand
= base_cand_from_table (base_in
);
1263 while (base_cand
&& !base
&& base_cand
->kind
!= CAND_PHI
)
1265 bool unsigned_p
= TYPE_UNSIGNED (TREE_TYPE (base_cand
->stride
));
1267 if (TREE_CODE (base_cand
->stride
) == INTEGER_CST
1268 && index_in
.multiple_of (tree_to_double_int (base_cand
->stride
),
1269 unsigned_p
, &multiple
))
1271 /* Y = (B + i') * S, S constant, c = kS for some integer k
1273 ============================
1274 X = (B + (i'+ k)) * S
1276 Y = B + (i' * S), S constant, c = kS for some integer k
1278 ============================
1279 X = (B + (i'+ k)) * S */
1280 kind
= base_cand
->kind
;
1281 base
= base_cand
->base_expr
;
1282 index
= base_cand
->index
+ multiple
;
1283 stride
= base_cand
->stride
;
1284 ctype
= base_cand
->cand_type
;
1285 if (has_single_use (base_in
))
1286 savings
= (base_cand
->dead_savings
1287 + stmt_cost (base_cand
->cand_stmt
, speed
));
1290 if (base_cand
->next_interp
)
1291 base_cand
= lookup_cand (base_cand
->next_interp
);
1298 /* No interpretations had anything useful to propagate, so
1299 produce X = Y + (c * 1). */
1303 stride
= integer_one_node
;
1304 ctype
= TREE_TYPE (base_in
);
1307 c
= alloc_cand_and_find_basis (kind
, gs
, base
, index
, stride
,
1312 /* Given GS which is an add or subtract of scalar integers or pointers,
1313 make at least one appropriate entry in the candidate table. */
1316 slsr_process_add (gimple gs
, tree rhs1
, tree rhs2
, bool speed
)
1318 bool subtract_p
= gimple_assign_rhs_code (gs
) == MINUS_EXPR
;
1319 slsr_cand_t c
= NULL
, c2
;
1321 if (TREE_CODE (rhs2
) == SSA_NAME
)
1323 /* First record an interpretation assuming RHS1 is the base expression
1324 and RHS2 is the stride. But it doesn't make sense for the
1325 stride to be a pointer, so don't record a candidate in that case. */
1326 if (!POINTER_TYPE_P (TREE_TYPE (rhs2
)))
1328 c
= create_add_ssa_cand (gs
, rhs1
, rhs2
, subtract_p
, speed
);
1330 /* Add the first interpretation to the statement-candidate
1332 add_cand_for_stmt (gs
, c
);
1335 /* If the two RHS operands are identical, or this is a subtract,
1337 if (operand_equal_p (rhs1
, rhs2
, 0) || subtract_p
)
1340 /* Otherwise, record another interpretation assuming RHS2 is the
1341 base expression and RHS1 is the stride, again provided that the
1342 stride is not a pointer. */
1343 if (!POINTER_TYPE_P (TREE_TYPE (rhs1
)))
1345 c2
= create_add_ssa_cand (gs
, rhs2
, rhs1
, false, speed
);
1347 c
->next_interp
= c2
->cand_num
;
1349 add_cand_for_stmt (gs
, c2
);
1356 /* Record an interpretation for the add-immediate. */
1357 index
= tree_to_double_int (rhs2
);
1361 c
= create_add_imm_cand (gs
, rhs1
, index
, speed
);
1363 /* Add the interpretation to the statement-candidate mapping. */
1364 add_cand_for_stmt (gs
, c
);
/* Given GS which is a negate of a scalar integer, make an appropriate
   entry in the candidate table.  A negate is equivalent to a multiply
   by -1.  */
1373 slsr_process_neg (gimple gs
, tree rhs1
, bool speed
)
1375 /* Record a CAND_MULT interpretation for the multiply by -1. */
1376 slsr_cand_t c
= create_mul_imm_cand (gs
, rhs1
, integer_minus_one_node
, speed
);
1378 /* Add the interpretation to the statement-candidate mapping. */
1379 add_cand_for_stmt (gs
, c
);
1382 /* Help function for legal_cast_p, operating on two trees. Checks
1383 whether it's allowable to cast from RHS to LHS. See legal_cast_p
1384 for more details. */
1387 legal_cast_p_1 (tree lhs
, tree rhs
)
1389 tree lhs_type
, rhs_type
;
1390 unsigned lhs_size
, rhs_size
;
1391 bool lhs_wraps
, rhs_wraps
;
1393 lhs_type
= TREE_TYPE (lhs
);
1394 rhs_type
= TREE_TYPE (rhs
);
1395 lhs_size
= TYPE_PRECISION (lhs_type
);
1396 rhs_size
= TYPE_PRECISION (rhs_type
);
1397 lhs_wraps
= TYPE_OVERFLOW_WRAPS (lhs_type
);
1398 rhs_wraps
= TYPE_OVERFLOW_WRAPS (rhs_type
);
1400 if (lhs_size
< rhs_size
1401 || (rhs_wraps
&& !lhs_wraps
)
1402 || (rhs_wraps
&& lhs_wraps
&& rhs_size
!= lhs_size
))
/* Return TRUE if GS is a statement that defines an SSA name from
   a conversion and is legal for us to combine with an add and multiply
   in the candidate table.  For example, suppose we have:

     A = B + i;
     C = (type) A;
     D = C * S;

   Without the type-cast, we would create a CAND_MULT for D with base B,
   index i, and stride S.  We want to record this candidate only if it
   is equivalent to apply the type cast following the multiply:

     A = B + i;
     E = A * S;
     D = (type) E;

   We will record the type with the candidate for D.  This allows us
   to use a similar previous candidate as a basis.  If we have earlier seen

     A' = B + i';
     C' = (type) A';
     D' = C' * S;

   we can replace D with

     D = D' + (i - i') * S;

   But if moving the type-cast would change semantics, we mustn't do this.

   This is legitimate for casts from a non-wrapping integral type to
   any integral type of the same or larger size.  It is not legitimate
   to convert a wrapping type to a non-wrapping type, or to a wrapping
   type of a different size.  I.e., with a wrapping type, we must
   assume that the addition B + i could wrap, in which case performing
   the multiply before or after one of the "illegal" type casts will
   have different semantics.  */
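/* Illustrative sketch only (hypothetical function, assumes 64-bit long):
   a cast from a narrower wrapping type cannot be combined, because the
   narrow addition may wrap before the widening cast is applied.  */
#if 0
unsigned long
cast_example (unsigned int b)
{
  unsigned int a = b + 5;	  /* May wrap at 2^32.  */
  return (unsigned long) a * 7;	  /* Not ((unsigned long) b + 5) * 7 when the add wraps.  */
}
#endif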
1446 legal_cast_p (gimple gs
, tree rhs
)
1448 if (!is_gimple_assign (gs
)
1449 || !CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (gs
)))
1452 return legal_cast_p_1 (gimple_assign_lhs (gs
), rhs
);
1455 /* Given GS which is a cast to a scalar integer type, determine whether
1456 the cast is legal for strength reduction. If so, make at least one
1457 appropriate entry in the candidate table. */
1460 slsr_process_cast (gimple gs
, tree rhs1
, bool speed
)
1463 slsr_cand_t base_cand
, c
, c2
;
1464 unsigned savings
= 0;
1466 if (!legal_cast_p (gs
, rhs1
))
1469 lhs
= gimple_assign_lhs (gs
);
1470 base_cand
= base_cand_from_table (rhs1
);
1471 ctype
= TREE_TYPE (lhs
);
1473 if (base_cand
&& base_cand
->kind
!= CAND_PHI
)
1477 /* Propagate all data from the base candidate except the type,
1478 which comes from the cast, and the base candidate's cast,
1479 which is no longer applicable. */
1480 if (has_single_use (rhs1
))
1481 savings
= (base_cand
->dead_savings
1482 + stmt_cost (base_cand
->cand_stmt
, speed
));
1484 c
= alloc_cand_and_find_basis (base_cand
->kind
, gs
,
1485 base_cand
->base_expr
,
1486 base_cand
->index
, base_cand
->stride
,
1488 if (base_cand
->next_interp
)
1489 base_cand
= lookup_cand (base_cand
->next_interp
);
      /* If nothing is known about the RHS, create fresh CAND_ADD and
	 CAND_MULT interpretations:

	 X = Y + (0 * 1)
	 X = (Y + 0) * 1

	 The first of these is somewhat arbitrary, but the choice of
	 1 for the stride simplifies the logic for propagating casts
	 into their uses.  */
1505 c
= alloc_cand_and_find_basis (CAND_ADD
, gs
, rhs1
, double_int_zero
,
1506 integer_one_node
, ctype
, 0);
1507 c2
= alloc_cand_and_find_basis (CAND_MULT
, gs
, rhs1
, double_int_zero
,
1508 integer_one_node
, ctype
, 0);
1509 c
->next_interp
= c2
->cand_num
;
1512 /* Add the first (or only) interpretation to the statement-candidate
1514 add_cand_for_stmt (gs
, c
);
1517 /* Given GS which is a copy of a scalar integer type, make at least one
1518 appropriate entry in the candidate table.
1520 This interface is included for completeness, but is unnecessary
1521 if this pass immediately follows a pass that performs copy
1522 propagation, such as DOM. */
1525 slsr_process_copy (gimple gs
, tree rhs1
, bool speed
)
1527 slsr_cand_t base_cand
, c
, c2
;
1528 unsigned savings
= 0;
1530 base_cand
= base_cand_from_table (rhs1
);
1532 if (base_cand
&& base_cand
->kind
!= CAND_PHI
)
1536 /* Propagate all data from the base candidate. */
1537 if (has_single_use (rhs1
))
1538 savings
= (base_cand
->dead_savings
1539 + stmt_cost (base_cand
->cand_stmt
, speed
));
1541 c
= alloc_cand_and_find_basis (base_cand
->kind
, gs
,
1542 base_cand
->base_expr
,
1543 base_cand
->index
, base_cand
->stride
,
1544 base_cand
->cand_type
, savings
);
1545 if (base_cand
->next_interp
)
1546 base_cand
= lookup_cand (base_cand
->next_interp
);
      /* If nothing is known about the RHS, create fresh CAND_ADD and
	 CAND_MULT interpretations:

	 X = Y + (0 * 1)
	 X = (Y + 0) * 1

	 The first of these is somewhat arbitrary, but the choice of
	 1 for the stride simplifies the logic for propagating casts
	 into their uses.  */
1562 c
= alloc_cand_and_find_basis (CAND_ADD
, gs
, rhs1
, double_int_zero
,
1563 integer_one_node
, TREE_TYPE (rhs1
), 0);
1564 c2
= alloc_cand_and_find_basis (CAND_MULT
, gs
, rhs1
, double_int_zero
,
1565 integer_one_node
, TREE_TYPE (rhs1
), 0);
1566 c
->next_interp
= c2
->cand_num
;
1569 /* Add the first (or only) interpretation to the statement-candidate
1571 add_cand_for_stmt (gs
, c
);
1574 class find_candidates_dom_walker
: public dom_walker
1577 find_candidates_dom_walker (cdi_direction direction
)
1578 : dom_walker (direction
) {}
1579 virtual void before_dom_children (basic_block
);
1582 /* Find strength-reduction candidates in block BB. */
1585 find_candidates_dom_walker::before_dom_children (basic_block bb
)
1587 bool speed
= optimize_bb_for_speed_p (bb
);
1588 gimple_stmt_iterator gsi
;
1590 for (gsi
= gsi_start_phis (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
1591 slsr_process_phi (gsi_stmt (gsi
), speed
);
1593 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
1595 gimple gs
= gsi_stmt (gsi
);
1597 if (gimple_vuse (gs
) && gimple_assign_single_p (gs
))
1598 slsr_process_ref (gs
);
1600 else if (is_gimple_assign (gs
)
1601 && SCALAR_INT_MODE_P
1602 (TYPE_MODE (TREE_TYPE (gimple_assign_lhs (gs
)))))
1604 tree rhs1
= NULL_TREE
, rhs2
= NULL_TREE
;
1606 switch (gimple_assign_rhs_code (gs
))
1610 rhs1
= gimple_assign_rhs1 (gs
);
1611 rhs2
= gimple_assign_rhs2 (gs
);
1612 /* Should never happen, but currently some buggy situations
1613 in earlier phases put constants in rhs1. */
1614 if (TREE_CODE (rhs1
) != SSA_NAME
)
1618 /* Possible future opportunity: rhs1 of a ptr+ can be
1620 case POINTER_PLUS_EXPR
:
1622 rhs2
= gimple_assign_rhs2 (gs
);
1628 rhs1
= gimple_assign_rhs1 (gs
);
1629 if (TREE_CODE (rhs1
) != SSA_NAME
)
1637 switch (gimple_assign_rhs_code (gs
))
1640 slsr_process_mul (gs
, rhs1
, rhs2
, speed
);
1644 case POINTER_PLUS_EXPR
:
1646 slsr_process_add (gs
, rhs1
, rhs2
, speed
);
1650 slsr_process_neg (gs
, rhs1
, speed
);
1654 slsr_process_cast (gs
, rhs1
, speed
);
1658 slsr_process_copy (gs
, rhs1
, speed
);
1668 /* Dump a candidate for debug. */
1671 dump_candidate (slsr_cand_t c
)
1673 fprintf (dump_file
, "%3d [%d] ", c
->cand_num
,
1674 gimple_bb (c
->cand_stmt
)->index
);
1675 print_gimple_stmt (dump_file
, c
->cand_stmt
, 0, 0);
1679 fputs (" MULT : (", dump_file
);
1680 print_generic_expr (dump_file
, c
->base_expr
, 0);
1681 fputs (" + ", dump_file
);
1682 dump_double_int (dump_file
, c
->index
, false);
1683 fputs (") * ", dump_file
);
1684 print_generic_expr (dump_file
, c
->stride
, 0);
1685 fputs (" : ", dump_file
);
1688 fputs (" ADD : ", dump_file
);
1689 print_generic_expr (dump_file
, c
->base_expr
, 0);
1690 fputs (" + (", dump_file
);
1691 dump_double_int (dump_file
, c
->index
, false);
1692 fputs (" * ", dump_file
);
1693 print_generic_expr (dump_file
, c
->stride
, 0);
1694 fputs (") : ", dump_file
);
1697 fputs (" REF : ", dump_file
);
1698 print_generic_expr (dump_file
, c
->base_expr
, 0);
1699 fputs (" + (", dump_file
);
1700 print_generic_expr (dump_file
, c
->stride
, 0);
1701 fputs (") + ", dump_file
);
1702 dump_double_int (dump_file
, c
->index
, false);
1703 fputs (" : ", dump_file
);
1706 fputs (" PHI : ", dump_file
);
1707 print_generic_expr (dump_file
, c
->base_expr
, 0);
1708 fputs (" + (unknown * ", dump_file
);
1709 print_generic_expr (dump_file
, c
->stride
, 0);
1710 fputs (") : ", dump_file
);
1715 print_generic_expr (dump_file
, c
->cand_type
, 0);
1716 fprintf (dump_file
, "\n basis: %d dependent: %d sibling: %d\n",
1717 c
->basis
, c
->dependent
, c
->sibling
);
1718 fprintf (dump_file
, " next-interp: %d dead-savings: %d\n",
1719 c
->next_interp
, c
->dead_savings
);
1721 fprintf (dump_file
, " phi: %d\n", c
->def_phi
);
1722 fputs ("\n", dump_file
);
1725 /* Dump the candidate vector for debug. */
1728 dump_cand_vec (void)
1733 fprintf (dump_file
, "\nStrength reduction candidate vector:\n\n");
1735 FOR_EACH_VEC_ELT (cand_vec
, i
, c
)
1739 /* Callback used to dump the candidate chains hash table. */
1742 ssa_base_cand_dump_callback (cand_chain
**slot
, void *ignored ATTRIBUTE_UNUSED
)
1744 const_cand_chain_t chain
= *slot
;
1747 print_generic_expr (dump_file
, chain
->base_expr
, 0);
1748 fprintf (dump_file
, " -> %d", chain
->cand
->cand_num
);
1750 for (p
= chain
->next
; p
; p
= p
->next
)
1751 fprintf (dump_file
, " -> %d", p
->cand
->cand_num
);
1753 fputs ("\n", dump_file
);
1757 /* Dump the candidate chains. */
1760 dump_cand_chains (void)
1762 fprintf (dump_file
, "\nStrength reduction candidate chains:\n\n");
1763 base_cand_map
.traverse_noresize
<void *, ssa_base_cand_dump_callback
> (NULL
);
1764 fputs ("\n", dump_file
);
1767 /* Dump the increment vector for debug. */
1770 dump_incr_vec (void)
1772 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1776 fprintf (dump_file
, "\nIncrement vector:\n\n");
1778 for (i
= 0; i
< incr_vec_len
; i
++)
1780 fprintf (dump_file
, "%3d increment: ", i
);
1781 dump_double_int (dump_file
, incr_vec
[i
].incr
, false);
1782 fprintf (dump_file
, "\n count: %d", incr_vec
[i
].count
);
1783 fprintf (dump_file
, "\n cost: %d", incr_vec
[i
].cost
);
1784 fputs ("\n initializer: ", dump_file
);
1785 print_generic_expr (dump_file
, incr_vec
[i
].initializer
, 0);
1786 fputs ("\n\n", dump_file
);
/* Replace *EXPR in candidate C with an equivalent strength-reduced
   data reference.  */
1795 replace_ref (tree
*expr
, slsr_cand_t c
)
1797 tree add_expr
, mem_ref
, acc_type
= TREE_TYPE (*expr
);
1798 unsigned HOST_WIDE_INT misalign
;
1801 /* Ensure the memory reference carries the minimum alignment
1802 requirement for the data type. See PR58041. */
1803 get_object_alignment_1 (*expr
, &align
, &misalign
);
1805 align
= (misalign
& -misalign
);
1806 if (align
< TYPE_ALIGN (acc_type
))
1807 acc_type
= build_aligned_type (acc_type
, align
);
1809 add_expr
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (c
->base_expr
),
1810 c
->base_expr
, c
->stride
);
1811 mem_ref
= fold_build2 (MEM_REF
, acc_type
, add_expr
,
1812 double_int_to_tree (c
->cand_type
, c
->index
));
1814 /* Gimplify the base addressing expression for the new MEM_REF tree. */
1815 gimple_stmt_iterator gsi
= gsi_for_stmt (c
->cand_stmt
);
1816 TREE_OPERAND (mem_ref
, 0)
1817 = force_gimple_operand_gsi (&gsi
, TREE_OPERAND (mem_ref
, 0),
1818 /*simple_p=*/true, NULL
,
1819 /*before=*/true, GSI_SAME_STMT
);
1820 copy_ref_info (mem_ref
, *expr
);
1822 update_stmt (c
->cand_stmt
);
/* Replace CAND_REF candidate C, each sibling of candidate C, and each
   dependent of candidate C with an equivalent strength-reduced data
   reference.  */
1830 replace_refs (slsr_cand_t c
)
1832 if (gimple_vdef (c
->cand_stmt
))
1834 tree
*lhs
= gimple_assign_lhs_ptr (c
->cand_stmt
);
1835 replace_ref (lhs
, c
);
1839 tree
*rhs
= gimple_assign_rhs1_ptr (c
->cand_stmt
);
1840 replace_ref (rhs
, c
);
1844 replace_refs (lookup_cand (c
->sibling
));
1847 replace_refs (lookup_cand (c
->dependent
));
1850 /* Return TRUE if candidate C is dependent upon a PHI. */
1853 phi_dependent_cand_p (slsr_cand_t c
)
1855 /* A candidate is not necessarily dependent upon a PHI just because
1856 it has a phi definition for its base name. It may have a basis
1857 that relies upon the same phi definition, in which case the PHI
1858 is irrelevant to this candidate. */
1861 && lookup_cand (c
->basis
)->def_phi
!= c
->def_phi
);
/* Calculate the increment required for candidate C relative to
   its basis.  */
1868 cand_increment (slsr_cand_t c
)
1872 /* If the candidate doesn't have a basis, just return its own
1873 index. This is useful in record_increments to help us find
1874 an existing initializer. Also, if the candidate's basis is
1875 hidden by a phi, then its own index will be the increment
1876 from the newly introduced phi basis. */
1877 if (!c
->basis
|| phi_dependent_cand_p (c
))
1880 basis
= lookup_cand (c
->basis
);
1881 gcc_assert (operand_equal_p (c
->base_expr
, basis
->base_expr
, 0));
1882 return c
->index
- basis
->index
;
/* Calculate the increment required for candidate C relative to
   its basis.  If we aren't going to generate pointer arithmetic
   for this candidate, return the absolute value of that increment
   instead.  */
1890 static inline double_int
1891 cand_abs_increment (slsr_cand_t c
)
1893 double_int increment
= cand_increment (c
);
1895 if (!address_arithmetic_p
&& increment
.is_negative ())
1896 increment
= -increment
;
1901 /* Return TRUE iff candidate C has already been replaced under
1902 another interpretation. */
1905 cand_already_replaced (slsr_cand_t c
)
1907 return (gimple_bb (c
->cand_stmt
) == 0);
1910 /* Common logic used by replace_unconditional_candidate and
1911 replace_conditional_candidate. */
1914 replace_mult_candidate (slsr_cand_t c
, tree basis_name
, double_int bump
)
1916 tree target_type
= TREE_TYPE (gimple_assign_lhs (c
->cand_stmt
));
1917 enum tree_code cand_code
= gimple_assign_rhs_code (c
->cand_stmt
);
1919 /* It is highly unlikely, but possible, that the resulting
1920 bump doesn't fit in a HWI. Abandon the replacement
1921 in this case. This does not affect siblings or dependents
1922 of C. Restriction to signed HWI is conservative for unsigned
1923 types but allows for safe negation without twisted logic. */
1924 if (bump
.fits_shwi ()
1925 && bump
.to_shwi () != HOST_WIDE_INT_MIN
1926 /* It is not useful to replace casts, copies, or adds of
1927 an SSA name and a constant. */
1928 && cand_code
!= MODIFY_EXPR
1929 && cand_code
!= NOP_EXPR
1930 && cand_code
!= PLUS_EXPR
1931 && cand_code
!= POINTER_PLUS_EXPR
1932 && cand_code
!= MINUS_EXPR
)
1934 enum tree_code code
= PLUS_EXPR
;
1936 gimple stmt_to_print
= NULL
;
1938 /* If the basis name and the candidate's LHS have incompatible
1939 types, introduce a cast. */
1940 if (!useless_type_conversion_p (target_type
, TREE_TYPE (basis_name
)))
1941 basis_name
= introduce_cast_before_cand (c
, target_type
, basis_name
);
1942 if (bump
.is_negative ())
1948 bump_tree
= double_int_to_tree (target_type
, bump
);
1950 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1952 fputs ("Replacing: ", dump_file
);
1953 print_gimple_stmt (dump_file
, c
->cand_stmt
, 0, 0);
1956 if (bump
.is_zero ())
1958 tree lhs
= gimple_assign_lhs (c
->cand_stmt
);
1959 gimple copy_stmt
= gimple_build_assign (lhs
, basis_name
);
1960 gimple_stmt_iterator gsi
= gsi_for_stmt (c
->cand_stmt
);
1961 gimple_set_location (copy_stmt
, gimple_location (c
->cand_stmt
));
1962 gsi_replace (&gsi
, copy_stmt
, false);
1963 c
->cand_stmt
= copy_stmt
;
1964 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1965 stmt_to_print
= copy_stmt
;
1970 if (cand_code
!= NEGATE_EXPR
) {
1971 rhs1
= gimple_assign_rhs1 (c
->cand_stmt
);
1972 rhs2
= gimple_assign_rhs2 (c
->cand_stmt
);
1974 if (cand_code
!= NEGATE_EXPR
1975 && ((operand_equal_p (rhs1
, basis_name
, 0)
1976 && operand_equal_p (rhs2
, bump_tree
, 0))
1977 || (operand_equal_p (rhs1
, bump_tree
, 0)
1978 && operand_equal_p (rhs2
, basis_name
, 0))))
1980 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1982 fputs ("(duplicate, not actually replacing)", dump_file
);
1983 stmt_to_print
= c
->cand_stmt
;
1988 gimple_stmt_iterator gsi
= gsi_for_stmt (c
->cand_stmt
);
1989 gimple_assign_set_rhs_with_ops (&gsi
, code
,
1990 basis_name
, bump_tree
);
1991 update_stmt (gsi_stmt (gsi
));
1992 c
->cand_stmt
= gsi_stmt (gsi
);
1993 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1994 stmt_to_print
= gsi_stmt (gsi
);
1998 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2000 fputs ("With: ", dump_file
);
2001 print_gimple_stmt (dump_file
, stmt_to_print
, 0, 0);
2002 fputs ("\n", dump_file
);
2007 /* Replace candidate C with an add or subtract. Note that we only
2008 operate on CAND_MULTs with known strides, so we will never generate
2009 a POINTER_PLUS_EXPR. Each candidate X = (B + i) * S is replaced by
2010 X = Y + ((i - i') * S), as described in the module commentary. The
2011 folded value ((i - i') * S) is referred to here as the "bump." */
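/* For example (illustrative): if C is x_3 = (b_1 + 7) * 4 and its basis
   is y_2 = (b_1 + 2) * 4, the increment is 7 - 2 = 5 and the bump is
   5 * 4 = 20, so C is replaced by x_3 = y_2 + 20.  */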
2014 replace_unconditional_candidate (slsr_cand_t c
)
2017 double_int stride
, bump
;
2019 if (cand_already_replaced (c
))
2022 basis
= lookup_cand (c
->basis
);
2023 stride
= tree_to_double_int (c
->stride
);
2024 bump
= cand_increment (c
) * stride
;
2026 replace_mult_candidate (c
, gimple_assign_lhs (basis
->cand_stmt
), bump
);
2029 /* Return the index in the increment vector of the given INCREMENT,
2030 or -1 if not found. The latter can occur if more than
2031 MAX_INCR_VEC_LEN increments have been found. */
2034 incr_vec_index (double_int increment
)
2038 for (i
= 0; i
< incr_vec_len
&& increment
!= incr_vec
[i
].incr
; i
++)
2041 if (i
< incr_vec_len
)
/* Create a new statement along edge E to add BASIS_NAME to the product
   of INCREMENT and the stride of candidate C.  Create and return a new
   SSA name from *VAR to be used as the LHS of the new statement.
   KNOWN_STRIDE is true iff C's stride is a constant.  */

static tree
create_add_on_incoming_edge (slsr_cand_t c, tree basis_name,
                             double_int increment, edge e, location_t loc,
                             bool known_stride)
{
  basic_block insert_bb;
  gimple_stmt_iterator gsi;
  tree lhs, basis_type;
  gimple new_stmt;

  /* If the add candidate along this incoming edge has the same
     index as C's hidden basis, the hidden basis represents this
     edge correctly.  */
  if (increment.is_zero ())
    return basis_name;

  basis_type = TREE_TYPE (basis_name);
  lhs = make_temp_ssa_name (basis_type, NULL, "slsr");

  if (known_stride)
    {
      tree bump_tree;
      enum tree_code code = PLUS_EXPR;
      double_int bump = increment * tree_to_double_int (c->stride);
      if (bump.is_negative ())
        {
          code = MINUS_EXPR;
          bump = -bump;
        }

      bump_tree = double_int_to_tree (basis_type, bump);
      new_stmt = gimple_build_assign_with_ops (code, lhs, basis_name,
                                               bump_tree);
    }
  else
    {
      int i;
      bool negate_incr = (!address_arithmetic_p && increment.is_negative ());
      i = incr_vec_index (negate_incr ? -increment : increment);
      gcc_assert (i >= 0);

      if (incr_vec[i].initializer)
        {
          enum tree_code code = negate_incr ? MINUS_EXPR : PLUS_EXPR;
          new_stmt = gimple_build_assign_with_ops (code, lhs, basis_name,
                                                   incr_vec[i].initializer);
        }
      else if (increment.is_one ())
        new_stmt = gimple_build_assign_with_ops (PLUS_EXPR, lhs, basis_name,
                                                 c->stride);
      else if (increment.is_minus_one ())
        new_stmt = gimple_build_assign_with_ops (MINUS_EXPR, lhs, basis_name,
                                                 c->stride);
      else
        gcc_unreachable ();
    }

  insert_bb = single_succ_p (e->src) ? e->src : split_edge (e);
  gsi = gsi_last_bb (insert_bb);

  if (!gsi_end_p (gsi) && is_ctrl_stmt (gsi_stmt (gsi)))
    gsi_insert_before (&gsi, new_stmt, GSI_NEW_STMT);
  else
    gsi_insert_after (&gsi, new_stmt, GSI_NEW_STMT);

  gimple_set_location (new_stmt, loc);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Inserting in block %d: ", insert_bb->index);
      print_gimple_stmt (dump_file, new_stmt, 0, 0);
    }

  return lhs;
}
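/* Illustrative sketch (hypothetical GIMPLE names): for an unknown
   stride _s and an incoming-edge increment of 1, the statement built
   above has the form

     slsr_1 = basis_2 + _s;

   while an increment whose initializer T_0 = _s * incr was recorded
   yields  slsr_1 = basis_2 + T_0  (or a subtract when the increment
   was negated for sharing).  The statement lands at the end of the
   edge's source block if that block has a single successor; otherwise
   the edge is split and the new block receives it.  */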
/* Given a candidate C with BASIS_NAME being the LHS of C's basis which
   is hidden by the phi node FROM_PHI, create a new phi node in the same
   block as FROM_PHI.  The new phi is suitable for use as a basis by C,
   with its phi arguments representing conditional adjustments to the
   hidden basis along conditional incoming paths.  Those adjustments are
   made by creating add statements (and sometimes recursively creating
   phis) along those incoming paths.  LOC is the location to attach to
   the introduced statements.  KNOWN_STRIDE is true iff C's stride is a
   constant.  */

static tree
create_phi_basis (slsr_cand_t c, gimple from_phi, tree basis_name,
                  location_t loc, bool known_stride)
{
  int i;
  tree name, phi_arg;
  gimple phi;
  vec<tree> phi_args;
  slsr_cand_t basis = lookup_cand (c->basis);
  int nargs = gimple_phi_num_args (from_phi);
  basic_block phi_bb = gimple_bb (from_phi);
  slsr_cand_t phi_cand = base_cand_from_table (gimple_phi_result (from_phi));
  phi_args.create (nargs);

  /* Process each argument of the existing phi that represents
     conditionally-executed add candidates.  */
  for (i = 0; i < nargs; i++)
    {
      edge e = (*phi_bb->preds)[i];
      tree arg = gimple_phi_arg_def (from_phi, i);
      tree feeding_def;

      /* If the phi argument is the base name of the CAND_PHI, then
         this incoming arc should use the hidden basis.  */
      if (operand_equal_p (arg, phi_cand->base_expr, 0))
        {
          if (basis->index.is_zero ())
            feeding_def = gimple_assign_lhs (basis->cand_stmt);
          else
            {
              double_int incr = -basis->index;
              feeding_def = create_add_on_incoming_edge (c, basis_name, incr,
                                                         e, loc, known_stride);
            }
        }
      else
        {
          gimple arg_def = SSA_NAME_DEF_STMT (arg);

          /* If there is another phi along this incoming edge, we must
             process it in the same fashion to ensure that all basis
             adjustments are made along its incoming edges.  */
          if (gimple_code (arg_def) == GIMPLE_PHI)
            feeding_def = create_phi_basis (c, arg_def, basis_name,
                                            loc, known_stride);
          else
            {
              slsr_cand_t arg_cand = base_cand_from_table (arg);
              double_int diff = arg_cand->index - basis->index;
              feeding_def = create_add_on_incoming_edge (c, basis_name, diff,
                                                         e, loc, known_stride);
            }
        }

      /* Because of recursion, we need to save the arguments in a vector
         so we can create the PHI statement all at once.  Otherwise the
         storage for the half-created PHI can be reclaimed.  */
      phi_args.safe_push (feeding_def);
    }

  /* Create the new phi basis.  */
  name = make_temp_ssa_name (TREE_TYPE (basis_name), NULL, "slsr");
  phi = create_phi_node (name, phi_bb);
  SSA_NAME_DEF_STMT (name) = phi;

  FOR_EACH_VEC_ELT (phi_args, i, phi_arg)
    {
      edge e = (*phi_bb->preds)[i];
      add_phi_arg (phi, phi_arg, e, loc);
    }

  update_stmt (phi);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fputs ("Introducing new phi basis: ", dump_file);
      print_gimple_stmt (dump_file, phi, 0, 0);
    }

  return name;
}
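/* Illustrative sketch (hypothetical GIMPLE, simplified): if C's hidden
   basis reaches it through

     x_3 = PHI <x_1(bb2), x_2(bb3)>

   where the candidates feeding x_1 and x_2 differ from the basis index
   by 2 and 3, adds of 2 * stride and 3 * stride (or of recorded
   initializers) are created on the two incoming arcs, and a new

     slsr_4 = PHI <slsr_5(bb2), slsr_6(bb3)>

   becomes the basis actually used when C is replaced.  */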
/* Given a candidate C whose basis is hidden by at least one intervening
   phi, introduce a matching number of new phis to represent its basis
   adjusted by conditional increments along possible incoming paths.  Then
   replace C as though it were an unconditional candidate, using the new
   basis.  */

static void
replace_conditional_candidate (slsr_cand_t c)
{
  tree basis_name, name;
  slsr_cand_t basis;
  location_t loc;
  double_int stride, bump;

  /* Look up the LHS SSA name from C's basis.  This will be the
     RHS1 of the adds we will introduce to create new phi arguments.  */
  basis = lookup_cand (c->basis);
  basis_name = gimple_assign_lhs (basis->cand_stmt);

  /* Create a new phi statement which will represent C's true basis
     after the transformation is complete.  */
  loc = gimple_location (c->cand_stmt);
  name = create_phi_basis (c, lookup_cand (c->def_phi)->cand_stmt,
                           basis_name, loc, KNOWN_STRIDE);
  /* Replace C with an add of the new basis phi and a constant.  */
  stride = tree_to_double_int (c->stride);
  bump = c->index * stride;

  replace_mult_candidate (c, name, bump);
}
/* Compute the expected costs of inserting basis adjustments for
   candidate C with phi-definition PHI.  The cost of inserting
   one adjustment is given by ONE_ADD_COST.  If PHI has arguments
   which are themselves phi results, recursively calculate costs
   for those phis as well.  */

static int
phi_add_costs (gimple phi, slsr_cand_t c, int one_add_cost)
{
  unsigned i;
  int cost = 0;
  slsr_cand_t phi_cand = base_cand_from_table (gimple_phi_result (phi));

  /* If we work our way back to a phi that isn't dominated by the hidden
     basis, this isn't a candidate for replacement.  Indicate this by
     returning an unreasonably high cost.  It's not easy to detect
     these situations when determining the basis, so we defer the
     decision until now.  */
  basic_block phi_bb = gimple_bb (phi);
  slsr_cand_t basis = lookup_cand (c->basis);
  basic_block basis_bb = gimple_bb (basis->cand_stmt);

  if (phi_bb == basis_bb || !dominated_by_p (CDI_DOMINATORS, phi_bb, basis_bb))
    return COST_INFINITE;

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree arg = gimple_phi_arg_def (phi, i);

      if (arg != phi_cand->base_expr)
        {
          gimple arg_def = SSA_NAME_DEF_STMT (arg);

          if (gimple_code (arg_def) == GIMPLE_PHI)
            cost += phi_add_costs (arg_def, c, one_add_cost);
          else
            {
              slsr_cand_t arg_cand = base_cand_from_table (arg);

              if (arg_cand->index != c->index)
                cost += one_add_cost;
            }
        }
    }

  return cost;
}
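/* Cost sketch (hypothetical numbers): for a phi with three arguments
   whose feeding statements are in the candidate table, if two of them
   have an index different from C's and ONE_ADD_COST == 4, the loop
   above contributes 8; an argument whose index already matches C's
   needs no adjustment and adds nothing.  A phi not dominated by the
   hidden basis instead returns COST_INFINITE immediately.  */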
/* For candidate C, each sibling of candidate C, and each dependent of
   candidate C, determine whether the candidate is dependent upon a
   phi that hides its basis.  If not, replace the candidate unconditionally.
   Otherwise, determine whether the cost of introducing compensation code
   for the candidate is offset by the gains from strength reduction.  If
   so, replace the candidate and introduce the compensation code.  */

static void
replace_uncond_cands_and_profitable_phis (slsr_cand_t c)
{
  if (phi_dependent_cand_p (c))
    {
      if (c->kind == CAND_MULT)
        {
          /* A candidate dependent upon a phi will replace a multiply by
             a constant with an add, and will insert at most one add for
             each phi argument.  Add these costs with the potential dead-code
             savings to determine profitability.  */
          bool speed = optimize_bb_for_speed_p (gimple_bb (c->cand_stmt));
          int mult_savings = stmt_cost (c->cand_stmt, speed);
          gimple phi = lookup_cand (c->def_phi)->cand_stmt;
          tree phi_result = gimple_phi_result (phi);
          int one_add_cost = add_cost (speed,
                                       TYPE_MODE (TREE_TYPE (phi_result)));
          int add_costs = one_add_cost + phi_add_costs (phi, c, one_add_cost);
          int cost = add_costs - mult_savings - c->dead_savings;

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "  Conditional candidate %d:\n", c->cand_num);
              fprintf (dump_file, "    add_costs = %d\n", add_costs);
              fprintf (dump_file, "    mult_savings = %d\n", mult_savings);
              fprintf (dump_file, "    dead_savings = %d\n", c->dead_savings);
              fprintf (dump_file, "    cost = %d\n", cost);
              if (cost <= COST_NEUTRAL)
                fputs ("  Replacing...\n", dump_file);
              else
                fputs ("  Not replaced.\n", dump_file);
            }

          if (cost <= COST_NEUTRAL)
            replace_conditional_candidate (c);
        }
    }
  else
    replace_unconditional_candidate (c);

  if (c->sibling)
    replace_uncond_cands_and_profitable_phis (lookup_cand (c->sibling));

  if (c->dependent)
    replace_uncond_cands_and_profitable_phis (lookup_cand (c->dependent));
}
/* Count the number of candidates in the tree rooted at C that have
   not already been replaced under other interpretations.  */

static int
count_candidates (slsr_cand_t c)
{
  unsigned count = cand_already_replaced (c) ? 0 : 1;

  if (c->sibling)
    count += count_candidates (lookup_cand (c->sibling));

  if (c->dependent)
    count += count_candidates (lookup_cand (c->dependent));

  return count;
}
/* Increase the count of INCREMENT by one in the increment vector.
   INCREMENT is associated with candidate C.  If INCREMENT is to be
   conditionally executed as part of a conditional candidate replacement,
   IS_PHI_ADJUST is true, otherwise false.  If an initializer
   T_0 = stride * I is provided by a candidate that dominates all
   candidates with the same increment, also record T_0 for subsequent use.  */

static void
record_increment (slsr_cand_t c, double_int increment, bool is_phi_adjust)
{
  bool found = false;
  unsigned i;

  /* Treat increments that differ only in sign as identical so as to
     share initializers, unless we are generating pointer arithmetic.  */
  if (!address_arithmetic_p && increment.is_negative ())
    increment = -increment;

  for (i = 0; i < incr_vec_len; i++)
    {
      if (incr_vec[i].incr == increment)
        {
          incr_vec[i].count++;
          found = true;

          /* If we previously recorded an initializer that doesn't
             dominate this candidate, it's not going to be useful to
             us after all.  */
          if (incr_vec[i].initializer
              && !dominated_by_p (CDI_DOMINATORS,
                                  gimple_bb (c->cand_stmt),
                                  incr_vec[i].init_bb))
            {
              incr_vec[i].initializer = NULL_TREE;
              incr_vec[i].init_bb = NULL;
            }

          break;
        }
    }

  if (!found && incr_vec_len < MAX_INCR_VEC_LEN - 1)
    {
      /* The first time we see an increment, create the entry for it.
         If this is the root candidate which doesn't have a basis, set
         the count to zero.  We're only processing it so it can possibly
         provide an initializer for other candidates.  */
      incr_vec[incr_vec_len].incr = increment;
      incr_vec[incr_vec_len].count = c->basis || is_phi_adjust ? 1 : 0;
      incr_vec[incr_vec_len].cost = COST_INFINITE;

      /* Optimistically record the first occurrence of this increment
         as providing an initializer (if it does); we will revise this
         opinion later if it doesn't dominate all other occurrences.
         Exception:  increments of -1, 0, 1 never need initializers;
         and phi adjustments don't ever provide initializers.  */
      if (c->kind == CAND_ADD
          && !is_phi_adjust
          && c->index == increment
          && (increment.sgt (double_int_one)
              || increment.slt (double_int_minus_one))
          && (gimple_assign_rhs_code (c->cand_stmt) == PLUS_EXPR
              || gimple_assign_rhs_code (c->cand_stmt) == POINTER_PLUS_EXPR))
        {
          tree t0 = NULL_TREE;
          tree rhs1 = gimple_assign_rhs1 (c->cand_stmt);
          tree rhs2 = gimple_assign_rhs2 (c->cand_stmt);
          if (operand_equal_p (rhs1, c->base_expr, 0))
            t0 = rhs2;
          else if (operand_equal_p (rhs2, c->base_expr, 0))
            t0 = rhs1;
          if (t0
              && SSA_NAME_DEF_STMT (t0)
              && gimple_bb (SSA_NAME_DEF_STMT (t0)))
            {
              incr_vec[incr_vec_len].initializer = t0;
              incr_vec[incr_vec_len++].init_bb
                = gimple_bb (SSA_NAME_DEF_STMT (t0));
            }
          else
            {
              incr_vec[incr_vec_len].initializer = NULL_TREE;
              incr_vec[incr_vec_len++].init_bb = NULL;
            }
        }
      else
        {
          incr_vec[incr_vec_len].initializer = NULL_TREE;
          incr_vec[incr_vec_len++].init_bb = NULL;
        }
    }
}
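/* Example of initializer recording (hypothetical GIMPLE): given an
   SSA-name stride _s and a dominating add candidate

     t_5 = x_4 + _3;      with c->index == 10 and _3 defined as _s * 10

   the name _3 is recorded as the initializer T_0 for increment 10, so
   later candidates needing that increment can reuse it instead of
   having insert_initializers emit a fresh multiply.  Increments of
   -1, 0 and 1 never receive initializers.  */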
/* Given phi statement PHI that hides a candidate from its BASIS, find
   the increments along each incoming arc (recursively handling additional
   phis that may be present) and record them.  These increments are the
   difference in index between the index-adjusting statements and the
   index of the basis.  */

static void
record_phi_increments (slsr_cand_t basis, gimple phi)
{
  unsigned i;
  slsr_cand_t phi_cand = base_cand_from_table (gimple_phi_result (phi));

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree arg = gimple_phi_arg_def (phi, i);

      if (!operand_equal_p (arg, phi_cand->base_expr, 0))
        {
          gimple arg_def = SSA_NAME_DEF_STMT (arg);

          if (gimple_code (arg_def) == GIMPLE_PHI)
            record_phi_increments (basis, arg_def);
          else
            {
              slsr_cand_t arg_cand = base_cand_from_table (arg);
              double_int diff = arg_cand->index - basis->index;
              record_increment (arg_cand, diff, PHI_ADJUST);
            }
        }
    }
}
/* Determine how many times each unique increment occurs in the set
   of candidates rooted at C's parent, recording the data in the
   increment vector.  For each unique increment I, if an initializer
   T_0 = stride * I is provided by a candidate that dominates all
   candidates with the same increment, also record T_0 for subsequent
   use.  */

static void
record_increments (slsr_cand_t c)
{
  if (!cand_already_replaced (c))
    {
      if (!phi_dependent_cand_p (c))
        record_increment (c, cand_increment (c), NOT_PHI_ADJUST);
      else
        {
          /* A candidate with a basis hidden by a phi will have one
             increment for its relationship to the index represented by
             the phi, and potentially additional increments along each
             incoming edge.  For the root of the dependency tree (which
             has no basis), process just the initial index in case it has
             an initializer that can be used by subsequent candidates.  */
          record_increment (c, c->index, NOT_PHI_ADJUST);

          if (c->basis)
            record_phi_increments (lookup_cand (c->basis),
                                   lookup_cand (c->def_phi)->cand_stmt);
        }
    }

  if (c->sibling)
    record_increments (lookup_cand (c->sibling));

  if (c->dependent)
    record_increments (lookup_cand (c->dependent));
}
/* Add up and return the costs of introducing add statements that
   require the increment INCR on behalf of candidate C and phi
   statement PHI.  Accumulate into *SAVINGS the potential savings
   from removing existing statements that feed PHI and have no other
   uses.  */

static int
phi_incr_cost (slsr_cand_t c, double_int incr, gimple phi, int *savings)
{
  unsigned i;
  int cost = 0;
  slsr_cand_t basis = lookup_cand (c->basis);
  slsr_cand_t phi_cand = base_cand_from_table (gimple_phi_result (phi));

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree arg = gimple_phi_arg_def (phi, i);

      if (!operand_equal_p (arg, phi_cand->base_expr, 0))
        {
          gimple arg_def = SSA_NAME_DEF_STMT (arg);

          if (gimple_code (arg_def) == GIMPLE_PHI)
            {
              int feeding_savings = 0;
              cost += phi_incr_cost (c, incr, arg_def, &feeding_savings);
              if (has_single_use (gimple_phi_result (arg_def)))
                *savings += feeding_savings;
            }
          else
            {
              slsr_cand_t arg_cand = base_cand_from_table (arg);
              double_int diff = arg_cand->index - basis->index;

              if (incr == diff)
                {
                  tree basis_lhs = gimple_assign_lhs (basis->cand_stmt);
                  tree lhs = gimple_assign_lhs (arg_cand->cand_stmt);
                  cost += add_cost (true, TYPE_MODE (TREE_TYPE (basis_lhs)));
                  if (has_single_use (lhs))
                    *savings += stmt_cost (arg_cand->cand_stmt, true);
                }
            }
        }
    }

  return cost;
}
/* Return the first candidate in the tree rooted at C that has not
   already been replaced, favoring siblings over dependents.  */

static slsr_cand_t
unreplaced_cand_in_tree (slsr_cand_t c)
{
  if (!cand_already_replaced (c))
    return c;

  if (c->sibling)
    {
      slsr_cand_t sib = unreplaced_cand_in_tree (lookup_cand (c->sibling));
      if (sib)
        return sib;
    }

  if (c->dependent)
    {
      slsr_cand_t dep = unreplaced_cand_in_tree (lookup_cand (c->dependent));
      if (dep)
        return dep;
    }

  return NULL;
}
/* Return TRUE if the candidates in the tree rooted at C should be
   optimized for speed, else FALSE.  We estimate this based on the block
   containing the most dominant candidate in the tree that has not yet
   been replaced.  */

static bool
optimize_cands_for_speed_p (slsr_cand_t c)
{
  slsr_cand_t c2 = unreplaced_cand_in_tree (c);
  gcc_assert (c2);
  return optimize_bb_for_speed_p (gimple_bb (c2->cand_stmt));
}
/* Add COST_IN to the lowest cost of any dependent path starting at
   candidate C or any of its siblings, counting only candidates along
   such paths with increment INCR.  Assume that replacing a candidate
   reduces cost by REPL_SAVINGS.  Also account for savings from any
   statements that would go dead.  If COUNT_PHIS is true, include
   costs of introducing feeding statements for conditional candidates.  */

static int
lowest_cost_path (int cost_in, int repl_savings, slsr_cand_t c,
                  double_int incr, bool count_phis)
{
  int local_cost, sib_cost, savings = 0;
  double_int cand_incr = cand_abs_increment (c);

  if (cand_already_replaced (c))
    local_cost = cost_in;
  else if (incr == cand_incr)
    local_cost = cost_in - repl_savings - c->dead_savings;
  else
    local_cost = cost_in - c->dead_savings;

  if (count_phis
      && phi_dependent_cand_p (c)
      && !cand_already_replaced (c))
    {
      gimple phi = lookup_cand (c->def_phi)->cand_stmt;
      local_cost += phi_incr_cost (c, incr, phi, &savings);

      if (has_single_use (gimple_phi_result (phi)))
        local_cost -= savings;
    }

  if (c->dependent)
    local_cost = lowest_cost_path (local_cost, repl_savings,
                                   lookup_cand (c->dependent), incr,
                                   count_phis);

  if (c->sibling)
    {
      sib_cost = lowest_cost_path (cost_in, repl_savings,
                                   lookup_cand (c->sibling), incr,
                                   count_phis);
      local_cost = MIN (local_cost, sib_cost);
    }

  return local_cost;
}
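/* Arithmetic sketch (hypothetical numbers): with COST_IN == 6,
   REPL_SAVINGS == 2, and a dependent chain of two candidates that both
   use increment INCR and have no dead-code savings, each match
   subtracts 2, giving 6 - 2 - 2 == 2 for that path.  A sibling path is
   evaluated starting from the original COST_IN and the smaller of the
   two results is returned.  */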
/* Compute the total savings that would accrue from all replacements
   in the candidate tree rooted at C, counting only candidates with
   increment INCR.  Assume that replacing a candidate reduces cost
   by REPL_SAVINGS.  Also account for savings from statements that
   would go dead.  */

static int
total_savings (int repl_savings, slsr_cand_t c, double_int incr,
               bool count_phis)
{
  int savings = 0;
  double_int cand_incr = cand_abs_increment (c);

  if (incr == cand_incr && !cand_already_replaced (c))
    savings += repl_savings + c->dead_savings;

  if (count_phis
      && phi_dependent_cand_p (c)
      && !cand_already_replaced (c))
    {
      int phi_savings = 0;
      gimple phi = lookup_cand (c->def_phi)->cand_stmt;
      savings -= phi_incr_cost (c, incr, phi, &phi_savings);

      if (has_single_use (gimple_phi_result (phi)))
        savings += phi_savings;
    }

  if (c->dependent)
    savings += total_savings (repl_savings, lookup_cand (c->dependent), incr,
                              count_phis);

  if (c->sibling)
    savings += total_savings (repl_savings, lookup_cand (c->sibling), incr,
                              count_phis);

  return savings;
}
/* Use target-specific costs to determine and record which increments
   in the current candidate tree are profitable to replace, assuming
   MODE and SPEED.  FIRST_DEP is the first dependent of the root of
   the candidate tree.

   One slight limitation here is that we don't account for the possible
   introduction of casts in some cases.  See replace_one_candidate for
   the cases where these are introduced.  This should probably be cleaned
   up sometime.  */

static void
analyze_increments (slsr_cand_t first_dep, enum machine_mode mode, bool speed)
{
  unsigned i;

  for (i = 0; i < incr_vec_len; i++)
    {
      HOST_WIDE_INT incr = incr_vec[i].incr.to_shwi ();

      /* If somehow this increment is bigger than a HWI, we won't
         be optimizing candidates that use it.  And if the increment
         has a count of zero, nothing will be done with it.  */
      if (!incr_vec[i].incr.fits_shwi () || !incr_vec[i].count)
        incr_vec[i].cost = COST_INFINITE;

      /* Increments of 0, 1, and -1 are always profitable to replace,
         because they always replace a multiply or add with an add or
         copy, and may cause one or more existing instructions to go
         dead.  Exception:  -1 can't be assumed to be profitable for
         pointer addition.  */
      else if (incr == 0
               || incr == 1
               || (incr == -1
                   && (gimple_assign_rhs_code (first_dep->cand_stmt)
                       != POINTER_PLUS_EXPR)))
        incr_vec[i].cost = COST_NEUTRAL;

      /* FORNOW: If we need to add an initializer, give up if a cast from
         the candidate's type to its stride's type can lose precision.
         This could eventually be handled better by expressly retaining the
         result of a cast to a wider type in the stride.  Example:

           short int _1;
           _2 = (int) _1;
           _3 = _2 * 10;
           _4 = x + _3;    ADD: x + (10 * _1) : int
           _5 = _2 * 15;
           _6 = x + _5;    ADD: x + (15 * _1) : int

         Right now replacing _6 would cause insertion of an initializer
         of the form "short int T = _1 * 5;" followed by a cast to
         int, which could overflow incorrectly.  Had we recorded _2 or
         (int)_1 as the stride, this wouldn't happen.  However, doing
         this breaks other opportunities, so this will require some
         care.  */
      else if (!incr_vec[i].initializer
               && TREE_CODE (first_dep->stride) != INTEGER_CST
               && !legal_cast_p_1 (first_dep->stride,
                                   gimple_assign_lhs (first_dep->cand_stmt)))

        incr_vec[i].cost = COST_INFINITE;

      /* If we need to add an initializer, make sure we don't introduce
         a multiply by a pointer type, which can happen in certain cast
         scenarios.  FIXME: When cleaning up these cast issues, we can
         afford to introduce the multiply provided we cast out to an
         unsigned int of appropriate size.  */
      else if (!incr_vec[i].initializer
               && TREE_CODE (first_dep->stride) != INTEGER_CST
               && POINTER_TYPE_P (TREE_TYPE (first_dep->stride)))

        incr_vec[i].cost = COST_INFINITE;

      /* For any other increment, if this is a multiply candidate, we
         must introduce a temporary T and initialize it with
         T_0 = stride * increment.  When optimizing for speed, walk the
         candidate tree to calculate the best cost reduction along any
         path; if it offsets the fixed cost of inserting the initializer,
         replacing the increment is profitable.  When optimizing for
         size, instead calculate the total cost reduction from replacing
         all candidates with this increment.  */
      else if (first_dep->kind == CAND_MULT)
        {
          int cost = mult_by_coeff_cost (incr, mode, speed);
          int repl_savings = mul_cost (speed, mode) - add_cost (speed, mode);
          if (speed)
            cost = lowest_cost_path (cost, repl_savings, first_dep,
                                     incr_vec[i].incr, COUNT_PHIS);
          else
            cost -= total_savings (repl_savings, first_dep, incr_vec[i].incr,
                                   COUNT_PHIS);

          incr_vec[i].cost = cost;
        }

      /* If this is an add candidate, the initializer may already
         exist, so only calculate the cost of the initializer if it
         doesn't.  We are replacing one add with another here, so the
         known replacement savings is zero.  We will account for removal
         of dead instructions in lowest_cost_path or total_savings.  */
      else
        {
          int cost = 0;
          if (!incr_vec[i].initializer)
            cost = mult_by_coeff_cost (incr, mode, speed);

          if (speed)
            cost = lowest_cost_path (cost, 0, first_dep, incr_vec[i].incr,
                                     COUNT_PHIS);
          else
            cost -= total_savings (0, first_dep, incr_vec[i].incr,
                                   COUNT_PHIS);

          incr_vec[i].cost = cost;
        }
    }
}
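/* Profitability sketch (hypothetical costs): for a multiply chain and
   an increment of 5, suppose mult_by_coeff_cost (5, mode, speed) is 4,
   mul_cost is 16, and add_cost is 4, so the replacement savings is 12.
   When optimizing for speed, a dependent path containing two candidates
   with increment 5 gives 4 - 12 - 12 == -20, which is at or below
   COST_NEUTRAL, so the increment is marked profitable to replace.  */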
/* Return the nearest common dominator of BB1 and BB2.  If the blocks
   are identical, return the earlier of C1 and C2 in *WHERE.  Otherwise,
   if the NCD matches BB1, return C1 in *WHERE; if the NCD matches BB2,
   return C2 in *WHERE; and if the NCD matches neither, return NULL in
   *WHERE.  Note: It is possible for one of C1 and C2 to be NULL.  */

static basic_block
ncd_for_two_cands (basic_block bb1, basic_block bb2,
                   slsr_cand_t c1, slsr_cand_t c2, slsr_cand_t *where)
{
  basic_block ncd;

  if (!bb1)
    {
      *where = c2;
      return bb2;
    }

  if (!bb2)
    {
      *where = c1;
      return bb1;
    }

  ncd = nearest_common_dominator (CDI_DOMINATORS, bb1, bb2);

  /* If both candidates are in the same block, the earlier
     candidate wins.  */
  if (bb1 == ncd && bb2 == ncd)
    {
      if (!c1 || (c2 && c2->cand_num < c1->cand_num))
        *where = c2;
      else
        *where = c1;
    }

  /* Otherwise, if one of them produced a candidate in the
     dominator, that one wins.  */
  else if (bb1 == ncd)
    *where = c1;

  else if (bb2 == ncd)
    *where = c2;

  /* If neither matches the dominator, neither wins.  */
  else
    *where = NULL;

  return ncd;
}
/* Consider all candidates that feed PHI.  Find the nearest common
   dominator of those candidates requiring the given increment INCR.
   Further find and return the nearest common dominator of this result
   with block NCD.  If the returned block contains one or more of the
   candidates, return the earliest candidate in the block in *WHERE.  */

static basic_block
ncd_with_phi (slsr_cand_t c, double_int incr, gimple phi,
              basic_block ncd, slsr_cand_t *where)
{
  unsigned i;
  slsr_cand_t basis = lookup_cand (c->basis);
  slsr_cand_t phi_cand = base_cand_from_table (gimple_phi_result (phi));

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree arg = gimple_phi_arg_def (phi, i);

      if (!operand_equal_p (arg, phi_cand->base_expr, 0))
        {
          gimple arg_def = SSA_NAME_DEF_STMT (arg);

          if (gimple_code (arg_def) == GIMPLE_PHI)
            ncd = ncd_with_phi (c, incr, arg_def, ncd, where);
          else
            {
              slsr_cand_t arg_cand = base_cand_from_table (arg);
              double_int diff = arg_cand->index - basis->index;

              if ((incr == diff) || (!address_arithmetic_p && incr == -diff))
                ncd = ncd_for_two_cands (ncd, gimple_bb (arg_cand->cand_stmt),
                                         *where, arg_cand, where);
            }
        }
    }

  return ncd;
}
/* Consider the candidate C together with any candidates that feed
   C's phi dependence (if any).  Find and return the nearest common
   dominator of those candidates requiring the given increment INCR.
   If the returned block contains one or more of the candidates,
   return the earliest candidate in the block in *WHERE.  */

static basic_block
ncd_of_cand_and_phis (slsr_cand_t c, double_int incr, slsr_cand_t *where)
{
  basic_block ncd = NULL;

  if (cand_abs_increment (c) == incr)
    {
      ncd = gimple_bb (c->cand_stmt);
      *where = c;
    }

  if (phi_dependent_cand_p (c))
    ncd = ncd_with_phi (c, incr, lookup_cand (c->def_phi)->cand_stmt,
                        ncd, where);

  return ncd;
}
/* Consider all candidates in the tree rooted at C for which INCR
   represents the required increment of C relative to its basis.
   Find and return the basic block that most nearly dominates all
   such candidates.  If the returned block contains one or more of
   the candidates, return the earliest candidate in the block in
   *WHERE.  */

static basic_block
nearest_common_dominator_for_cands (slsr_cand_t c, double_int incr,
                                    slsr_cand_t *where)
{
  basic_block sib_ncd = NULL, dep_ncd = NULL, this_ncd = NULL, ncd;
  slsr_cand_t sib_where = NULL, dep_where = NULL, this_where = NULL, new_where;

  /* First find the NCD of all siblings and dependents.  */
  if (c->sibling)
    sib_ncd = nearest_common_dominator_for_cands (lookup_cand (c->sibling),
                                                  incr, &sib_where);
  if (c->dependent)
    dep_ncd = nearest_common_dominator_for_cands (lookup_cand (c->dependent),
                                                  incr, &dep_where);
  if (!sib_ncd && !dep_ncd)
    {
      new_where = NULL;
      ncd = NULL;
    }
  else if (sib_ncd && !dep_ncd)
    {
      new_where = sib_where;
      ncd = sib_ncd;
    }
  else if (dep_ncd && !sib_ncd)
    {
      new_where = dep_where;
      ncd = dep_ncd;
    }
  else
    ncd = ncd_for_two_cands (sib_ncd, dep_ncd, sib_where,
                             dep_where, &new_where);

  /* If the candidate's increment doesn't match the one we're interested
     in (and nor do any increments for feeding defs of a phi-dependence),
     then the result depends only on siblings and dependents.  */
  this_ncd = ncd_of_cand_and_phis (c, incr, &this_where);

  if (!this_ncd || cand_already_replaced (c))
    {
      *where = new_where;
      return ncd;
    }

  /* Otherwise, compare this candidate with the result from all siblings
     and dependents.  */
  ncd = ncd_for_two_cands (ncd, this_ncd, new_where, this_where, where);

  return ncd;
}
/* Return TRUE if the increment indexed by INDEX is profitable to replace.  */

static bool
profitable_increment_p (unsigned index)
{
  return (incr_vec[index].cost <= COST_NEUTRAL);
}
/* For each profitable increment in the increment vector not equal to
   0 or 1 (or -1, for non-pointer arithmetic), find the nearest common
   dominator of all statements in the candidate chain rooted at C
   that require that increment, and insert an initializer
   T_0 = stride * increment at that location.  Record T_0 with the
   increment record.  */

static void
insert_initializers (slsr_cand_t c)
{
  unsigned i;

  for (i = 0; i < incr_vec_len; i++)
    {
      basic_block bb;
      slsr_cand_t where = NULL;
      gimple init_stmt;
      tree stride_type, new_name, incr_tree;
      double_int incr = incr_vec[i].incr;

      if (!profitable_increment_p (i)
          || incr.is_one ()
          || (incr.is_minus_one ()
              && gimple_assign_rhs_code (c->cand_stmt) != POINTER_PLUS_EXPR)
          || incr.is_zero ())
        continue;

      /* We may have already identified an existing initializer that
         will suffice.  */
      if (incr_vec[i].initializer)
        {
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fputs ("Using existing initializer: ", dump_file);
              print_gimple_stmt (dump_file,
                                 SSA_NAME_DEF_STMT (incr_vec[i].initializer),
                                 0, 0);
            }
          continue;
        }

      /* Find the block that most closely dominates all candidates
         with this increment.  If there is at least one candidate in
         that block, the earliest one will be returned in WHERE.  */
      bb = nearest_common_dominator_for_cands (c, incr, &where);

      /* Create a new SSA name to hold the initializer's value.  */
      stride_type = TREE_TYPE (c->stride);
      new_name = make_temp_ssa_name (stride_type, NULL, "slsr");
      incr_vec[i].initializer = new_name;

      /* Create the initializer and insert it in the latest possible
         dominating position.  */
      incr_tree = double_int_to_tree (stride_type, incr);
      init_stmt = gimple_build_assign_with_ops (MULT_EXPR, new_name,
                                                c->stride, incr_tree);
      if (where)
        {
          gimple_stmt_iterator gsi = gsi_for_stmt (where->cand_stmt);
          gsi_insert_before (&gsi, init_stmt, GSI_SAME_STMT);
          gimple_set_location (init_stmt, gimple_location (where->cand_stmt));
        }
      else
        {
          gimple_stmt_iterator gsi = gsi_last_bb (bb);
          gimple basis_stmt = lookup_cand (c->basis)->cand_stmt;

          if (!gsi_end_p (gsi) && is_ctrl_stmt (gsi_stmt (gsi)))
            gsi_insert_before (&gsi, init_stmt, GSI_SAME_STMT);
          else
            gsi_insert_after (&gsi, init_stmt, GSI_SAME_STMT);

          gimple_set_location (init_stmt, gimple_location (basis_stmt));
        }

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fputs ("Inserting initializer: ", dump_file);
          print_gimple_stmt (dump_file, init_stmt, 0, 0);
        }
    }
}
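/* Illustrative result (hypothetical GIMPLE): for stride _s and a
   profitable increment of 7 that has no pre-existing initializer, a
   statement of the form

     slsr_9 = _s * 7;

   is inserted at the nearest common dominator of the candidates that
   need it (just ahead of the earliest such candidate when one lives in
   that block), and slsr_9 is recorded as the increment's initializer
   for use by replace_one_candidate.  */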
/* Return TRUE iff all required increments for candidates feeding PHI
   are profitable to replace on behalf of candidate C.  */

static bool
all_phi_incrs_profitable (slsr_cand_t c, gimple phi)
{
  unsigned i;
  slsr_cand_t basis = lookup_cand (c->basis);
  slsr_cand_t phi_cand = base_cand_from_table (gimple_phi_result (phi));

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree arg = gimple_phi_arg_def (phi, i);

      if (!operand_equal_p (arg, phi_cand->base_expr, 0))
        {
          gimple arg_def = SSA_NAME_DEF_STMT (arg);

          if (gimple_code (arg_def) == GIMPLE_PHI)
            {
              if (!all_phi_incrs_profitable (c, arg_def))
                return false;
            }
          else
            {
              int j;
              slsr_cand_t arg_cand = base_cand_from_table (arg);
              double_int increment = arg_cand->index - basis->index;

              if (!address_arithmetic_p && increment.is_negative ())
                increment = -increment;

              j = incr_vec_index (increment);

              if (dump_file && (dump_flags & TDF_DETAILS))
                {
                  fprintf (dump_file, "  Conditional candidate %d, phi: ",
                           c->cand_num);
                  print_gimple_stmt (dump_file, phi, 0, 0);
                  fputs ("  increment: ", dump_file);
                  dump_double_int (dump_file, increment, false);
                  if (j < 0)
                    fprintf (dump_file,
                             "\n  Not replaced; incr_vec overflow.\n");
                  else
                    {
                      fprintf (dump_file, "\n    cost: %d\n", incr_vec[j].cost);
                      if (profitable_increment_p (j))
                        fputs ("  Replacing...\n", dump_file);
                      else
                        fputs ("  Not replaced.\n", dump_file);
                    }
                }

              if (j < 0 || !profitable_increment_p (j))
                return false;
            }
        }
    }

  return true;
}
/* Create a NOP_EXPR that copies FROM_EXPR into a new SSA name of
   type TO_TYPE, and insert it in front of the statement represented
   by candidate C.  Use *NEW_VAR to create the new SSA name.  Return
   the new SSA name.  */

static tree
introduce_cast_before_cand (slsr_cand_t c, tree to_type, tree from_expr)
{
  tree cast_lhs;
  gimple cast_stmt;
  gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);

  cast_lhs = make_temp_ssa_name (to_type, NULL, "slsr");
  cast_stmt = gimple_build_assign_with_ops (NOP_EXPR, cast_lhs,
                                            from_expr, NULL_TREE);
  gimple_set_location (cast_stmt, gimple_location (c->cand_stmt));
  gsi_insert_before (&gsi, cast_stmt, GSI_SAME_STMT);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fputs ("  Inserting: ", dump_file);
      print_gimple_stmt (dump_file, cast_stmt, 0, 0);
    }

  return cast_lhs;
}
/* Replace the RHS of the statement represented by candidate C with
   NEW_CODE, NEW_RHS1, and NEW_RHS2, provided that to do so doesn't
   leave C unchanged or just interchange its operands.  The original
   operation and operands are in OLD_CODE, OLD_RHS1, and OLD_RHS2.
   If the replacement was made and we are doing a details dump,
   return the revised statement, else NULL.  */

static gimple
replace_rhs_if_not_dup (enum tree_code new_code, tree new_rhs1, tree new_rhs2,
                        enum tree_code old_code, tree old_rhs1, tree old_rhs2,
                        slsr_cand_t c)
{
  if (new_code != old_code
      || ((!operand_equal_p (new_rhs1, old_rhs1, 0)
           || !operand_equal_p (new_rhs2, old_rhs2, 0))
          && (!operand_equal_p (new_rhs1, old_rhs2, 0)
              || !operand_equal_p (new_rhs2, old_rhs1, 0))))
    {
      gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
      gimple_assign_set_rhs_with_ops (&gsi, new_code, new_rhs1, new_rhs2);
      update_stmt (gsi_stmt (gsi));
      c->cand_stmt = gsi_stmt (gsi);

      if (dump_file && (dump_flags & TDF_DETAILS))
        return gsi_stmt (gsi);
    }

  else if (dump_file && (dump_flags & TDF_DETAILS))
    fputs ("  (duplicate, not actually replacing)\n", dump_file);

  return NULL;
}
/* Strength-reduce the statement represented by candidate C by replacing
   it with an equivalent addition or subtraction.  I is the index into
   the increment vector identifying C's increment.  NEW_VAR is used to
   create a new SSA name if a cast needs to be introduced.  BASIS_NAME
   is the rhs1 to use in creating the add/subtract.  */

static void
replace_one_candidate (slsr_cand_t c, unsigned i, tree basis_name)
{
  gimple stmt_to_print = NULL;
  tree orig_rhs1, orig_rhs2;
  tree rhs2;
  enum tree_code orig_code, repl_code;
  double_int cand_incr;

  orig_code = gimple_assign_rhs_code (c->cand_stmt);
  orig_rhs1 = gimple_assign_rhs1 (c->cand_stmt);
  orig_rhs2 = gimple_assign_rhs2 (c->cand_stmt);
  cand_incr = cand_increment (c);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fputs ("Replacing: ", dump_file);
      print_gimple_stmt (dump_file, c->cand_stmt, 0, 0);
      stmt_to_print = c->cand_stmt;
    }

  if (address_arithmetic_p)
    repl_code = POINTER_PLUS_EXPR;
  else
    repl_code = PLUS_EXPR;

  /* If the increment has an initializer T_0, replace the candidate
     statement with an add of the basis name and the initializer.  */
  if (incr_vec[i].initializer)
    {
      tree init_type = TREE_TYPE (incr_vec[i].initializer);
      tree orig_type = TREE_TYPE (orig_rhs2);

      if (types_compatible_p (orig_type, init_type))
        rhs2 = incr_vec[i].initializer;
      else
        rhs2 = introduce_cast_before_cand (c, orig_type,
                                           incr_vec[i].initializer);

      if (incr_vec[i].incr != cand_incr)
        {
          gcc_assert (repl_code == PLUS_EXPR);
          repl_code = MINUS_EXPR;
        }

      stmt_to_print = replace_rhs_if_not_dup (repl_code, basis_name, rhs2,
                                              orig_code, orig_rhs1, orig_rhs2,
                                              c);
    }

  /* Otherwise, the increment is one of -1, 0, and 1.  Replace
     with a subtract of the stride from the basis name, a copy
     from the basis name, or an add of the stride to the basis
     name, respectively.  It may be necessary to introduce a
     cast (or reuse an existing cast).  */
  else if (cand_incr.is_one ())
    {
      tree stride_type = TREE_TYPE (c->stride);
      tree orig_type = TREE_TYPE (orig_rhs2);

      if (types_compatible_p (orig_type, stride_type))
        rhs2 = c->stride;
      else
        rhs2 = introduce_cast_before_cand (c, orig_type, c->stride);

      stmt_to_print = replace_rhs_if_not_dup (repl_code, basis_name, rhs2,
                                              orig_code, orig_rhs1, orig_rhs2,
                                              c);
    }

  else if (cand_incr.is_minus_one ())
    {
      tree stride_type = TREE_TYPE (c->stride);
      tree orig_type = TREE_TYPE (orig_rhs2);
      gcc_assert (repl_code != POINTER_PLUS_EXPR);

      if (types_compatible_p (orig_type, stride_type))
        rhs2 = c->stride;
      else
        rhs2 = introduce_cast_before_cand (c, orig_type, c->stride);

      if (orig_code != MINUS_EXPR
          || !operand_equal_p (basis_name, orig_rhs1, 0)
          || !operand_equal_p (rhs2, orig_rhs2, 0))
        {
          gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
          gimple_assign_set_rhs_with_ops (&gsi, MINUS_EXPR, basis_name, rhs2);
          update_stmt (gsi_stmt (gsi));
          c->cand_stmt = gsi_stmt (gsi);

          if (dump_file && (dump_flags & TDF_DETAILS))
            stmt_to_print = gsi_stmt (gsi);
        }
      else if (dump_file && (dump_flags & TDF_DETAILS))
        fputs ("  (duplicate, not actually replacing)\n", dump_file);
    }

  else if (cand_incr.is_zero ())
    {
      tree lhs = gimple_assign_lhs (c->cand_stmt);
      tree lhs_type = TREE_TYPE (lhs);
      tree basis_type = TREE_TYPE (basis_name);

      if (types_compatible_p (lhs_type, basis_type))
        {
          gimple copy_stmt = gimple_build_assign (lhs, basis_name);
          gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
          gimple_set_location (copy_stmt, gimple_location (c->cand_stmt));
          gsi_replace (&gsi, copy_stmt, false);
          c->cand_stmt = copy_stmt;

          if (dump_file && (dump_flags & TDF_DETAILS))
            stmt_to_print = copy_stmt;
        }
      else
        {
          gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
          gimple cast_stmt = gimple_build_assign_with_ops (NOP_EXPR, lhs,
                                                           basis_name,
                                                           NULL_TREE);
          gimple_set_location (cast_stmt, gimple_location (c->cand_stmt));
          gsi_replace (&gsi, cast_stmt, false);
          c->cand_stmt = cast_stmt;

          if (dump_file && (dump_flags & TDF_DETAILS))
            stmt_to_print = cast_stmt;
        }
    }
  else
    gcc_unreachable ();

  if (dump_file && (dump_flags & TDF_DETAILS) && stmt_to_print)
    {
      fputs ("With: ", dump_file);
      print_gimple_stmt (dump_file, stmt_to_print, 0, 0);
      fputs ("\n", dump_file);
    }
}
/* For each candidate in the tree rooted at C, replace it with
   an increment if such has been shown to be profitable.  */

static void
replace_profitable_candidates (slsr_cand_t c)
{
  if (!cand_already_replaced (c))
    {
      double_int increment = cand_abs_increment (c);
      enum tree_code orig_code = gimple_assign_rhs_code (c->cand_stmt);
      int i;

      i = incr_vec_index (increment);

      /* Only process profitable increments.  Nothing useful can be done
         to a cast or copy.  */
      if (i >= 0
          && profitable_increment_p (i)
          && orig_code != MODIFY_EXPR
          && orig_code != NOP_EXPR)
        {
          if (phi_dependent_cand_p (c))
            {
              gimple phi = lookup_cand (c->def_phi)->cand_stmt;

              if (all_phi_incrs_profitable (c, phi))
                {
                  /* Look up the LHS SSA name from C's basis.  This will be
                     the RHS1 of the adds we will introduce to create new
                     phi arguments.  */
                  slsr_cand_t basis = lookup_cand (c->basis);
                  tree basis_name = gimple_assign_lhs (basis->cand_stmt);

                  /* Create a new phi statement that will represent C's true
                     basis after the transformation is complete.  */
                  location_t loc = gimple_location (c->cand_stmt);
                  tree name = create_phi_basis (c, phi, basis_name,
                                                loc, UNKNOWN_STRIDE);

                  /* Replace C with an add of the new basis phi and the
                     increment.  */
                  replace_one_candidate (c, i, name);
                }
            }
          else
            {
              slsr_cand_t basis = lookup_cand (c->basis);
              tree basis_name = gimple_assign_lhs (basis->cand_stmt);
              replace_one_candidate (c, i, basis_name);
            }
        }
    }

  if (c->sibling)
    replace_profitable_candidates (lookup_cand (c->sibling));

  if (c->dependent)
    replace_profitable_candidates (lookup_cand (c->dependent));
}
/* Analyze costs of related candidates in the candidate vector,
   and make beneficial replacements.  */

static void
analyze_candidates_and_replace (void)
{
  unsigned i;
  slsr_cand_t c;

  /* Each candidate that has a null basis and a non-null
     dependent is the root of a tree of related statements.
     Analyze each tree to determine a subset of those
     statements that can be replaced with maximum benefit.  */
  FOR_EACH_VEC_ELT (cand_vec, i, c)
    {
      slsr_cand_t first_dep;

      if (c->basis != 0 || c->dependent == 0)
        continue;

      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "\nProcessing dependency tree rooted at %d.\n",
                 c->cand_num);

      first_dep = lookup_cand (c->dependent);

      /* If this is a chain of CAND_REFs, unconditionally replace
         each of them with a strength-reduced data reference.  */
      if (c->kind == CAND_REF)
        replace_refs (c);

      /* If the common stride of all related candidates is a known
         constant, each candidate without a phi-dependence can be
         profitably replaced.  Each replaces a multiply by a single
         add, with the possibility that a feeding add also goes dead.
         A candidate with a phi-dependence is replaced only if the
         compensation code it requires is offset by the strength
         reduction savings.  */
      else if (TREE_CODE (c->stride) == INTEGER_CST)
        replace_uncond_cands_and_profitable_phis (first_dep);

      /* When the stride is an SSA name, it may still be profitable
         to replace some or all of the dependent candidates, depending
         on whether the introduced increments can be reused, or are
         less expensive to calculate than the replaced statements.  */
      else
        {
          enum machine_mode mode;
          bool speed;

          /* Determine whether we'll be generating pointer arithmetic
             when replacing candidates.  */
          address_arithmetic_p = (c->kind == CAND_ADD
                                  && POINTER_TYPE_P (c->cand_type));

          /* If all candidates have already been replaced under other
             interpretations, nothing remains to be done.  */
          if (!count_candidates (c))
            continue;

          /* Construct an array of increments for this candidate chain.  */
          incr_vec = XNEWVEC (incr_info, MAX_INCR_VEC_LEN);
          incr_vec_len = 0;
          record_increments (c);

          /* Determine which increments are profitable to replace.  */
          mode = TYPE_MODE (TREE_TYPE (gimple_assign_lhs (c->cand_stmt)));
          speed = optimize_cands_for_speed_p (c);
          analyze_increments (first_dep, mode, speed);

          /* Insert initializers of the form T_0 = stride * increment
             for use in profitable replacements.  */
          insert_initializers (first_dep);

          /* Perform the replacements.  */
          replace_profitable_candidates (first_dep);

          free (incr_vec);
        }
    }
}
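/* End-to-end illustration (hypothetical source, greatly simplified):
   given

     int x1 = b + n * s;
     int x2 = b + (n + 1) * s;
     int x3 = b + (n + 2) * s;

   the second and third computations are dependents of the first in the
   candidate tree.  With an SSA-name stride s, the analysis above
   decides whether rewriting them as x2 = x1 + s and x3 = x2 + s (plus
   any needed initializers) is cheaper than the multiplies removed.  */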
static unsigned
execute_strength_reduction (void)
{
  /* Create the obstack where candidates will reside.  */
  gcc_obstack_init (&cand_obstack);

  /* Allocate the candidate vector.  */
  cand_vec.create (128);

  /* Allocate the mapping from statements to candidate indices.  */
  stmt_cand_map = pointer_map_create ();

  /* Create the obstack where candidate chains will reside.  */
  gcc_obstack_init (&chain_obstack);

  /* Allocate the mapping from base expressions to candidate chains.  */
  base_cand_map.create (500);

  /* Initialize the loop optimizer.  We need to detect flow across
     back edges, and this gives us dominator information as well.  */
  loop_optimizer_init (AVOID_CFG_MODIFICATIONS);

  /* Walk the CFG in predominator order looking for strength reduction
     candidates.  */
  find_candidates_dom_walker (CDI_DOMINATORS)
    .walk (cfun->cfg->x_entry_block_ptr);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_cand_vec ();
      dump_cand_chains ();
    }

  /* Analyze costs and make appropriate replacements.  */
  analyze_candidates_and_replace ();

  loop_optimizer_finalize ();
  base_cand_map.dispose ();
  obstack_free (&chain_obstack, NULL);
  pointer_map_destroy (stmt_cand_map);
  cand_vec.release ();
  obstack_free (&cand_obstack, NULL);

  return 0;
}
static bool
gate_strength_reduction (void)
{
  return flag_tree_slsr;
}
const pass_data pass_data_strength_reduction =
{
  GIMPLE_PASS, /* type */
  "slsr", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_GIMPLE_SLSR, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_verify_ssa, /* todo_flags_finish */
};
class pass_strength_reduction : public gimple_opt_pass
{
public:
  pass_strength_reduction(gcc::context *ctxt)
    : gimple_opt_pass(pass_data_strength_reduction, ctxt)
  {}

  /* opt_pass methods: */
  bool gate () { return gate_strength_reduction (); }
  unsigned int execute () { return execute_strength_reduction (); }

}; // class pass_strength_reduction
gimple_opt_pass *
make_pass_strength_reduction (gcc::context *ctxt)
{
  return new pass_strength_reduction (ctxt);
}