1 /* Straight-line strength reduction.
2 Copyright (C) 2012-2019 Free Software Foundation, Inc.
3 Contributed by Bill Schmidt, IBM <wschmidt@linux.ibm.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 /* There are many algorithms for performing strength reduction on
22 loops. This is not one of them. IVOPTS handles strength reduction
23 of induction variables just fine. This pass is intended to pick
24 up the crumbs it leaves behind, by considering opportunities for
25 strength reduction along dominator paths.
27 Strength reduction addresses explicit multiplies, and certain
28 multiplies implicit in addressing expressions. It would also be
29 possible to apply strength reduction to divisions and modulos,
30 but such opportunities are relatively uncommon.
32 Strength reduction is also currently restricted to integer operations.
33 If desired, it could be extended to floating-point operations under
34 control of something like -funsafe-math-optimizations. */
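/* A small source-level illustration (hypothetical code, not from the
   testsuite) of the kind of straight-line opportunity this pass targets,
   as opposed to the loop induction variables handled by IVOPTS:

     void
     f (int *out, int b, int s)
     {
       out[0] = (b + 1) * s;
       out[1] = (b + 3) * s;
       out[2] = (b + 7) * s;
     }

   None of the three multiplies involves a loop induction variable, so
   IVOPTS leaves them alone; this pass can rewrite the second and third
   as additions of 2 * s and 4 * s to the earlier products.  */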
38 #include "coretypes.h"
44 #include "tree-pass.h"
47 #include "gimple-pretty-print.h"
48 #include "fold-const.h"
49 #include "gimple-iterator.h"
50 #include "gimplify-me.h"
51 #include "stor-layout.h"
56 #include "tree-ssa-address.h"
57 #include "tree-affine.h"
61 /* Information about a strength reduction candidate. Each statement
62 in the candidate table represents an expression of one of the
63 following forms (the special case of CAND_REF will be described
   later):
66 (CAND_MULT) S1: X = (B + i) * S
67 (CAND_ADD) S1: X = B + (i * S)
69 Here X and B are SSA names, i is an integer constant, and S is
70 either an SSA name or a constant. We call B the "base," i the
71 "index", and S the "stride."
73 Any statement S0 that dominates S1 and is of the form:
75 (CAND_MULT) S0: Y = (B + i') * S
76 (CAND_ADD) S0: Y = B + (i' * S)
78 is called a "basis" for S1. In both cases, S1 may be replaced by
80 S1': X = Y + (i - i') * S,
82 where (i - i') * S is folded to the extent possible.
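   As a concrete illustration (invented for exposition, not taken from a
   real dump), suppose

     S0:  y_1 = (b_2 + 4) * s_3
     S1:  x_4 = (b_2 + 9) * s_3

   with S0 dominating S1.  Then S0 is a basis for S1, and S1 may be
   rewritten as

     S1': x_4 = y_1 + 5 * s_3

   because (i - i') = 9 - 4 = 5.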
84 All gimple statements are visited in dominator order, and each
85 statement that may contribute to one of the forms of S1 above is
86 given at least one entry in the candidate table. Such statements
87 include addition, pointer addition, subtraction, multiplication,
88 negation, copies, and nontrivial type casts. If a statement may
89 represent more than one expression of the forms of S1 above,
90 multiple "interpretations" are stored in the table and chained
   together.  Examples:
93 * An add of two SSA names may treat either operand as the base.
94 * A multiply of two SSA names, likewise.
95 * A copy or cast may be thought of as either a CAND_MULT with
96 i = 0 and S = 1, or as a CAND_ADD with i = 0 or S = 0.
98 Candidate records are allocated from an obstack. They are addressed
99 both from a hash table keyed on S1, and from a vector of candidate
100 pointers arranged in predominator order.
104 Currently we don't recognize:

      S0: Y = (S * i') - B
      S1: X = (S * i) - B
109 as a strength reduction opportunity, even though this S1 would
110 also be replaceable by the S1' above. This can be added if it
111 comes up in practice.
113 Strength reduction in addressing
114 --------------------------------
115 There is another kind of candidate known as CAND_REF. A CAND_REF
116 describes a statement containing a memory reference having
117 complex addressing that might benefit from strength reduction.
118 Specifically, we are interested in references for which
119 get_inner_reference returns a base address, offset, and bitpos as
    follows:
122 base: MEM_REF (T1, C1)
123 offset: MULT_EXPR (PLUS_EXPR (T2, C2), C3)
124 bitpos: C4 * BITS_PER_UNIT
126 Here T1 and T2 are arbitrary trees, and C1, C2, C3, C4 are
127 arbitrary integer constants. Note that C2 may be zero, in which
128 case the offset will be MULT_EXPR (T2, C3).
130 When this pattern is recognized, the original memory reference
131 can be replaced with:
133 MEM_REF (POINTER_PLUS_EXPR (T1, MULT_EXPR (T2, C3)),
             C1 + (C2 * C3) + C4)
136 which distributes the multiply to allow constant folding. When
137 two or more addressing expressions can be represented by MEM_REFs
138 of this form, differing only in the constants C1, C2, and C4,
139 making this substitution produces more efficient addressing during
140 the RTL phases. When there are not at least two expressions with
141 the same values of T1, T2, and C3, there is nothing to be gained
    by the replacement.
144 Strength reduction of CAND_REFs uses the same infrastructure as
145 that used by CAND_MULTs and CAND_ADDs. We record T1 in the base (B)
146 field, MULT_EXPR (T2, C3) in the stride (S) field, and
147 C1 + (C2 * C3) + C4 in the index (i) field. A basis for a CAND_REF
148 is thus another CAND_REF with the same B and S values. When at
149 least two CAND_REFs are chained together using the basis relation,
150 each of them is replaced as above, resulting in improved code
151 generation for addressing.
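   As a worked example (with invented names and constants), suppose two
   references reduce to

     ref A:  base MEM_REF (p_1, 0), offset (n_2 + 3) * 8, bitpos 32
     ref B:  base MEM_REF (p_1, 0), offset (n_2 + 5) * 8, bitpos 32

   Each is recorded with B = p_1, S = MULT_EXPR (n_2, 8), and index
   i = C1 + (C2 * C3) + C4, giving 0 + 24 + 4 = 28 for ref A and
   0 + 40 + 4 = 44 for ref B.  The two CAND_REFs share B and S, so both
   MEM_REFs are rewritten over the common POINTER_PLUS_EXPR (p_1,
   MULT_EXPR (n_2, 8)), which need only be computed once.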
153 Conditional candidates
154 ======================
156 Conditional candidates are best illustrated with an example.
   Consider the code sequence:

     (1) x_0 = ...;
     (2) a_0 = x_0 * 5;          MULT (B: x_0; i: 0; S: 5)
         if (...)
     (3)   x_1 = x_0 + 1;        ADD  (B: x_0; i: 1; S: 1)
     (4) x_2 = PHI <x_0, x_1>;   PHI  (B: x_0; i: 0; S: 1)
     (5) x_3 = x_2 + 1;          ADD  (B: x_2; i: 1; S: 1)
     (6) a_1 = x_3 * 5;          MULT (B: x_2; i: 1; S: 5)
167 Here strength reduction is complicated by the uncertain value of x_2.
   A legitimate transformation is:

     (1) x_0 = ...;
     (2) a_0 = x_0 * 5;
         if (...)
     (3)   [x_1 = x_0 + 1;]
     (3a)  t_1 = a_0 + 5;
     (4) [x_2 = PHI <x_0, x_1>;]
     (4a) t_2 = PHI <a_0, t_1>;
     (5) [x_3 = x_2 + 1;]
     (6r) a_1 = t_2 + 5;
182 where the bracketed instructions may go dead.
184 To recognize this opportunity, we have to observe that statement (6)
185 has a "hidden basis" (2). The hidden basis is unlike a normal basis
186 in that the statement and the hidden basis have different base SSA
187 names (x_2 and x_0, respectively). The relationship is established
188 when a statement's base name (x_2) is defined by a phi statement (4),
189 each argument of which (x_0, x_1) has an identical "derived base name."
190 If the argument is defined by a candidate (as x_1 is by (3)) that is a
191 CAND_ADD having a stride of 1, the derived base name of the argument is
192 the base name of the candidate (x_0). Otherwise, the argument itself
193 is its derived base name (as is the case with argument x_0).
195 The hidden basis for statement (6) is the nearest dominating candidate
196 whose base name is the derived base name (x_0) of the feeding phi (4),
197 and whose stride is identical to that of the statement. We can then
198 create the new "phi basis" (4a) and feeding adds along incoming arcs (3a),
199 allowing the final replacement of (6) by the strength-reduced (6r).
201 To facilitate this, a new kind of candidate (CAND_PHI) is introduced.
202 A CAND_PHI is not a candidate for replacement, but is maintained in the
203 candidate table to ease discovery of hidden bases. Any phi statement
204 whose arguments share a common derived base name is entered into the
205 table with the derived base name, an (arbitrary) index of zero, and a
206 stride of 1. A statement with a hidden basis can then be detected by
207 simply looking up its feeding phi definition in the candidate table,
208 extracting the derived base name, and searching for a basis in the
209 usual manner after substituting the derived base name.
211 Note that the transformation is only valid when the original phi and
212 the statements that define the phi's arguments are all at the same
213 position in the loop hierarchy. */
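/* A rough source-level view of the conditional example above, for
   illustration only:

     int x = ...;
     int a0 = x * 5;
     if (cond)
       x = x + 1;
     int a1 = (x + 1) * 5;

   After the transformation, a1 is computed as t_2 + 5, where t_2 is
   a0 on the path that skips the increment and a0 + 5 on the path that
   takes it.  */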
216 /* Index into the candidate vector, offset by 1. VECs are zero-based,
217 while cand_idx's are one-based, with zero indicating null. */
typedef unsigned cand_idx;
/* The kind of candidate.  */

enum cand_kind {CAND_MULT, CAND_ADD, CAND_REF, CAND_PHI};

struct slsr_cand_d
{
231 /* The candidate statement S1. */
234 /* The base expression B: often an SSA name, but not always. */
240 /* The index constant i. */
243 /* The type of the candidate. This is normally the type of base_expr,
244 but casts may have occurred when combining feeding instructions.
245 A candidate can only be a basis for candidates of the same final type.
246 (For CAND_REFs, this is the type to be used for operand 1 of the
247 replacement MEM_REF.) */
250 /* The type to be used to interpret the stride field when the stride
251 is not a constant. Normally the same as the type of the recorded
252 stride, but when the stride has been cast we need to maintain that
253 knowledge in order to make legal substitutions without losing
254 precision. When the stride is a constant, this will be sizetype. */
257 /* The kind of candidate (CAND_MULT, etc.). */
260 /* Index of this candidate in the candidate vector. */
263 /* Index of the next candidate record for the same statement.
264 A statement may be useful in more than one way (e.g., due to
265 commutativity). So we can have multiple "interpretations"
  cand_idx next_interp;
  /* Index of the first candidate record in a chain for the same
     statement.  */
  cand_idx first_interp;
273 /* Index of the basis statement S0, if any, in the candidate vector. */
276 /* First candidate for which this candidate is a basis, if one exists. */
279 /* Next candidate having the same basis as this one. */
282 /* If this is a conditional candidate, the CAND_PHI candidate
283 that defines the base SSA name B. */
286 /* Savings that can be expected from eliminating dead code if this
287 candidate is replaced. */
290 /* For PHI candidates, use a visited flag to keep from processing the
291 same PHI twice from multiple paths. */
294 /* We sometimes have to cache a phi basis with a phi candidate to
295 avoid processing it twice. Valid only if visited==1. */
};

typedef struct slsr_cand_d slsr_cand, *slsr_cand_t;
typedef const struct slsr_cand_d *const_slsr_cand_t;
302 /* Pointers to candidates are chained together as part of a mapping
303 from base expressions to the candidates that use them. */

struct cand_chain_d
{
  /* Base expression for the chain of candidates:  often, but not
     always, an SSA name.  */
  tree base_expr;

  /* Pointer to a candidate.  */
  slsr_cand_t cand;

  /* Chain pointer.  */
  struct cand_chain_d *next;
};

typedef struct cand_chain_d cand_chain, *cand_chain_t;
typedef const struct cand_chain_d *const_cand_chain_t;
322 /* Information about a unique "increment" associated with candidates
323 having an SSA name for a stride. An increment is the difference
324 between the index of the candidate and the index of its basis,
325 i.e., (i - i') as discussed in the module commentary.
327 When we are not going to generate address arithmetic we treat
328 increments that differ only in sign as the same, allowing sharing
329 of the cost of initializers. The absolute value of the increment
330 is stored in the incr_info. */
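/* For instance (an invented example), if one candidate requires Y + 3 * S
   and a related candidate requires Y' - 3 * S, and we are not generating
   address arithmetic, both are recorded under the single increment 3:
   one initializer T_0 = 3 * S can serve both, added in the first case
   and subtracted in the second.  */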
struct incr_info_d
{
  /* The increment that relates a candidate to its basis.  */
337 /* How many times the increment occurs in the candidate tree. */
340 /* Cost of replacing candidates using this increment. Negative and
341 zero costs indicate replacement should be performed. */
344 /* If this increment is profitable but is not -1, 0, or 1, it requires
345 an initializer T_0 = stride * incr to be found or introduced in the
346 nearest common dominator of all candidates. This field holds T_0
347 for subsequent use. */
350 /* If the initializer was found to already exist, this is the block
351 where it was found. */
};

typedef struct incr_info_d incr_info, *incr_info_t;
357 /* Candidates are maintained in a vector. If candidate X dominates
358 candidate Y, then X appears before Y in the vector; but the
359 converse does not necessarily hold. */
static vec<slsr_cand_t> cand_vec;
374 enum phi_adjust_status
380 enum count_phis_status
386 /* Constrain how many PHI nodes we will visit for a conditional
387 candidate (depth and breadth). */
const int MAX_SPREAD = 16;
390 /* Pointer map embodying a mapping from statements to candidates. */
static hash_map <gimple *, slsr_cand_t> *stmt_cand_map;
393 /* Obstack for candidates. */
static struct obstack cand_obstack;
396 /* Obstack for candidate chains. */
static struct obstack chain_obstack;
399 /* An array INCR_VEC of incr_infos is used during analysis of related
400 candidates having an SSA name for a stride. INCR_VEC_LEN describes
401 its current length. MAX_INCR_VEC_LEN is used to avoid costly
402 pathological cases. */
static incr_info_t incr_vec;
static unsigned incr_vec_len;
const int MAX_INCR_VEC_LEN = 16;
407 /* For a chain of candidates with unknown stride, indicates whether or not
408 we must generate pointer arithmetic when replacing statements. */
static bool address_arithmetic_p;
411 /* Forward function declarations. */
static slsr_cand_t base_cand_from_table (tree);
static tree introduce_cast_before_cand (slsr_cand_t, tree, tree);
static bool legal_cast_p_1 (tree, tree);
416 /* Produce a pointer to the IDX'th candidate in the candidate vector. */
static slsr_cand_t
lookup_cand (cand_idx idx)
{
  return cand_vec[idx];
}
424 /* Helper for hashing a candidate chain header. */
struct cand_chain_hasher : nofree_ptr_hash <cand_chain>
{
  static inline hashval_t hash (const cand_chain *);
  static inline bool equal (const cand_chain *, const cand_chain *);
};
inline hashval_t
cand_chain_hasher::hash (const cand_chain *p)
{
  tree base_expr = p->base_expr;
  return iterative_hash_expr (base_expr, 0);
}
inline bool
cand_chain_hasher::equal (const cand_chain *chain1, const cand_chain *chain2)
{
  return operand_equal_p (chain1->base_expr, chain2->base_expr, 0);
}
445 /* Hash table embodying a mapping from base exprs to chains of candidates. */
static hash_table <cand_chain_hasher> *base_cand_map;
448 /* Pointer map used by tree_to_aff_combination_expand. */
static hash_map <tree, name_expansion *> *name_expansions;
450 /* Pointer map embodying a mapping from bases to alternative bases. */
static hash_map <tree, tree> *alt_base_map;
453 /* Given BASE, use the tree affine combination facilities to
454 find the underlying tree expression for BASE, with any
455 immediate offset excluded.
457 N.B. we should eliminate this backtracking with better forward
458 analysis in a future release. */
461 get_alternative_base (tree base
)
463 tree
*result
= alt_base_map
->get (base
);
470 tree_to_aff_combination_expand (base
, TREE_TYPE (base
),
471 &aff
, &name_expansions
);
473 expr
= aff_combination_to_tree (&aff
);
475 gcc_assert (!alt_base_map
->put (base
, base
== expr
? NULL
: expr
));
477 return expr
== base
? NULL
: expr
;
483 /* Look in the candidate table for a CAND_PHI that defines BASE and
484 return it if found; otherwise return NULL. */
487 find_phi_def (tree base
)
491 if (TREE_CODE (base
) != SSA_NAME
)
494 c
= base_cand_from_table (base
);
496 if (!c
|| c
->kind
!= CAND_PHI
497 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_phi_result (c
->cand_stmt
)))
503 /* Determine whether all uses of NAME are directly or indirectly
504 used by STMT. That is, we want to know whether if STMT goes
505 dead, the definition of NAME also goes dead. */
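/* For example (illustrative), given

     t_2 = t_1 * 2;
     x_3 = t_2 + 1;    <-- STMT

   every use of t_1 reaches STMT through t_2, so if STMT dies, t_2 and
   then t_1 become dead as well, and the function returns true for
   NAME = t_1.  */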
507 uses_consumed_by_stmt (tree name
, gimple
*stmt
, unsigned recurse
= 0)
510 imm_use_iterator iter
;
513 FOR_EACH_IMM_USE_STMT (use_stmt
, iter
, name
)
515 if (use_stmt
== stmt
|| is_gimple_debug (use_stmt
))
518 if (!is_gimple_assign (use_stmt
)
519 || !gimple_get_lhs (use_stmt
)
520 || !is_gimple_reg (gimple_get_lhs (use_stmt
))
522 || !uses_consumed_by_stmt (gimple_get_lhs (use_stmt
), stmt
,
526 BREAK_FROM_IMM_USE_STMT (iter
);
533 /* Helper routine for find_basis_for_candidate. May be called twice:
534 once for the candidate's base expr, and optionally again either for
535 the candidate's phi definition or for a CAND_REF's alternative base
539 find_basis_for_base_expr (slsr_cand_t c
, tree base_expr
)
541 cand_chain mapping_key
;
543 slsr_cand_t basis
= NULL
;
545 // Limit potential of N^2 behavior for long candidate chains.
547 int max_iters
= PARAM_VALUE (PARAM_MAX_SLSR_CANDIDATE_SCAN
);
549 mapping_key
.base_expr
= base_expr
;
550 chain
= base_cand_map
->find (&mapping_key
);
552 for (; chain
&& iters
< max_iters
; chain
= chain
->next
, ++iters
)
554 slsr_cand_t one_basis
= chain
->cand
;
556 if (one_basis
->kind
!= c
->kind
557 || one_basis
->cand_stmt
== c
->cand_stmt
558 || !operand_equal_p (one_basis
->stride
, c
->stride
, 0)
559 || !types_compatible_p (one_basis
->cand_type
, c
->cand_type
)
560 || !types_compatible_p (one_basis
->stride_type
, c
->stride_type
)
561 || !dominated_by_p (CDI_DOMINATORS
,
562 gimple_bb (c
->cand_stmt
),
563 gimple_bb (one_basis
->cand_stmt
)))
566 tree lhs
= gimple_assign_lhs (one_basis
->cand_stmt
);
567 if (lhs
&& TREE_CODE (lhs
) == SSA_NAME
568 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs
))
571 if (!basis
|| basis
->cand_num
< one_basis
->cand_num
)
578 /* Use the base expr from candidate C to look for possible candidates
579 that can serve as a basis for C. Each potential basis must also
580 appear in a block that dominates the candidate statement and have
581 the same stride and type. If more than one possible basis exists,
582 the one with highest index in the vector is chosen; this will be
583 the most immediately dominating basis. */
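/* For instance (an invented numbering), if candidates 3, 8, and 11 all
   dominate candidate 14 and agree with it in base expression, stride,
   and type, candidate 11 is selected as the basis, being the
   highest-numbered and therefore most immediately dominating one.  */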
586 find_basis_for_candidate (slsr_cand_t c
)
588 slsr_cand_t basis
= find_basis_for_base_expr (c
, c
->base_expr
);
590 /* If a candidate doesn't have a basis using its base expression,
591 it may have a basis hidden by one or more intervening phis. */
592 if (!basis
&& c
->def_phi
)
594 basic_block basis_bb
, phi_bb
;
595 slsr_cand_t phi_cand
= lookup_cand (c
->def_phi
);
596 basis
= find_basis_for_base_expr (c
, phi_cand
->base_expr
);
600 /* A hidden basis must dominate the phi-definition of the
601 candidate's base name. */
602 phi_bb
= gimple_bb (phi_cand
->cand_stmt
);
603 basis_bb
= gimple_bb (basis
->cand_stmt
);
605 if (phi_bb
== basis_bb
606 || !dominated_by_p (CDI_DOMINATORS
, phi_bb
, basis_bb
))
612 /* If we found a hidden basis, estimate additional dead-code
613 savings if the phi and its feeding statements can be removed. */
614 tree feeding_var
= gimple_phi_result (phi_cand
->cand_stmt
);
615 if (basis
&& uses_consumed_by_stmt (feeding_var
, c
->cand_stmt
))
616 c
->dead_savings
+= phi_cand
->dead_savings
;
620 if (flag_expensive_optimizations
&& !basis
&& c
->kind
== CAND_REF
)
622 tree alt_base_expr
= get_alternative_base (c
->base_expr
);
624 basis
= find_basis_for_base_expr (c
, alt_base_expr
);
629 c
->sibling
= basis
->dependent
;
630 basis
->dependent
= c
->cand_num
;
631 return basis
->cand_num
;
637 /* Record a mapping from BASE to C, indicating that C may potentially serve
638 as a basis using that base expression. BASE may be the same as
639 C->BASE_EXPR; alternatively BASE can be a different tree that shares the
640 underlying expression of C->BASE_EXPR. */
643 record_potential_basis (slsr_cand_t c
, tree base
)
650 node
= (cand_chain_t
) obstack_alloc (&chain_obstack
, sizeof (cand_chain
));
651 node
->base_expr
= base
;
654 slot
= base_cand_map
->find_slot (node
, INSERT
);
658 cand_chain_t head
= (cand_chain_t
) (*slot
);
659 node
->next
= head
->next
;
666 /* Allocate storage for a new candidate and initialize its fields.
667 Attempt to find a basis for the candidate.
669 For CAND_REF, an alternative base may also be recorded and used
670 to find a basis. This helps cases where the expression hidden
671 behind BASE (which is usually an SSA_NAME) has immediate offset, e.g.

      a2[i + 20][j] = 2;  */
678 alloc_cand_and_find_basis (enum cand_kind kind
, gimple
*gs
, tree base
,
679 const widest_int
&index
, tree stride
, tree ctype
,
680 tree stype
, unsigned savings
)
682 slsr_cand_t c
= (slsr_cand_t
) obstack_alloc (&cand_obstack
,
688 c
->cand_type
= ctype
;
689 c
->stride_type
= stype
;
691 c
->cand_num
= cand_vec
.length ();
693 c
->first_interp
= c
->cand_num
;
696 c
->def_phi
= kind
== CAND_MULT
? find_phi_def (base
) : 0;
697 c
->dead_savings
= savings
;
699 c
->cached_basis
= NULL_TREE
;
701 cand_vec
.safe_push (c
);
703 if (kind
== CAND_PHI
)
706 c
->basis
= find_basis_for_candidate (c
);
708 record_potential_basis (c
, base
);
709 if (flag_expensive_optimizations
&& kind
== CAND_REF
)
711 tree alt_base
= get_alternative_base (base
);
713 record_potential_basis (c
, alt_base
);
719 /* Determine the target cost of statement GS when compiling according
    to SPEED.  */
723 stmt_cost (gimple
*gs
, bool speed
)
725 tree lhs
, rhs1
, rhs2
;
726 machine_mode lhs_mode
;
728 gcc_assert (is_gimple_assign (gs
));
729 lhs
= gimple_assign_lhs (gs
);
730 rhs1
= gimple_assign_rhs1 (gs
);
731 lhs_mode
= TYPE_MODE (TREE_TYPE (lhs
));
733 switch (gimple_assign_rhs_code (gs
))
736 rhs2
= gimple_assign_rhs2 (gs
);
738 if (tree_fits_shwi_p (rhs2
))
739 return mult_by_coeff_cost (tree_to_shwi (rhs2
), lhs_mode
, speed
);
741 gcc_assert (TREE_CODE (rhs1
) != INTEGER_CST
);
742 return mul_cost (speed
, lhs_mode
);
745 case POINTER_PLUS_EXPR
:
747 return add_cost (speed
, lhs_mode
);
750 return neg_cost (speed
, lhs_mode
);
753 return convert_cost (lhs_mode
, TYPE_MODE (TREE_TYPE (rhs1
)), speed
);
755 /* Note that we don't assign costs to copies that in most cases
    will go away.  */
768 /* Look up the defining statement for BASE_IN and return a pointer
769 to its candidate in the candidate table, if any; otherwise NULL.
770 Only CAND_ADD and CAND_MULT candidates are returned. */
773 base_cand_from_table (tree base_in
)
777 gimple
*def
= SSA_NAME_DEF_STMT (base_in
);
779 return (slsr_cand_t
) NULL
;
781 result
= stmt_cand_map
->get (def
);
783 if (result
&& (*result
)->kind
!= CAND_REF
)
786 return (slsr_cand_t
) NULL
;
789 /* Add an entry to the statement-to-candidate mapping. */
792 add_cand_for_stmt (gimple
*gs
, slsr_cand_t c
)
794 gcc_assert (!stmt_cand_map
->put (gs
, c
));
797 /* Given PHI which contains a phi statement, determine whether it
798 satisfies all the requirements of a phi candidate. If so, create
799 a candidate. Note that a CAND_PHI never has a basis itself, but
800 is used to help find a basis for subsequent candidates. */
803 slsr_process_phi (gphi
*phi
, bool speed
)
806 tree arg0_base
= NULL_TREE
, base_type
;
808 struct loop
*cand_loop
= gimple_bb (phi
)->loop_father
;
809 unsigned savings
= 0;
811 /* A CAND_PHI requires each of its arguments to have the same
812 derived base name. (See the module header commentary for a
813 definition of derived base names.) Furthermore, all feeding
814 definitions must be in the same position in the loop hierarchy
    as the phi statement.  */
817 for (i
= 0; i
< gimple_phi_num_args (phi
); i
++)
819 slsr_cand_t arg_cand
;
820 tree arg
= gimple_phi_arg_def (phi
, i
);
821 tree derived_base_name
= NULL_TREE
;
822 gimple
*arg_stmt
= NULL
;
823 basic_block arg_bb
= NULL
;
825 if (TREE_CODE (arg
) != SSA_NAME
)
828 arg_cand
= base_cand_from_table (arg
);
832 while (arg_cand
->kind
!= CAND_ADD
&& arg_cand
->kind
!= CAND_PHI
)
834 if (!arg_cand
->next_interp
)
837 arg_cand
= lookup_cand (arg_cand
->next_interp
);
840 if (!integer_onep (arg_cand
->stride
))
843 derived_base_name
= arg_cand
->base_expr
;
844 arg_stmt
= arg_cand
->cand_stmt
;
845 arg_bb
= gimple_bb (arg_stmt
);
847 /* Gather potential dead code savings if the phi statement
848 can be removed later on. */
849 if (uses_consumed_by_stmt (arg
, phi
))
851 if (gimple_code (arg_stmt
) == GIMPLE_PHI
)
852 savings
+= arg_cand
->dead_savings
;
854 savings
+= stmt_cost (arg_stmt
, speed
);
857 else if (SSA_NAME_IS_DEFAULT_DEF (arg
))
859 derived_base_name
= arg
;
860 arg_bb
= single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun
));
863 if (!arg_bb
|| arg_bb
->loop_father
!= cand_loop
)
867 arg0_base
= derived_base_name
;
868 else if (!operand_equal_p (derived_base_name
, arg0_base
, 0))
872 /* Create the candidate. "alloc_cand_and_find_basis" is named
873 misleadingly for this case, as no basis will be sought for a
    CAND_PHI.  */
875 base_type
= TREE_TYPE (arg0_base
);
877 c
= alloc_cand_and_find_basis (CAND_PHI
, phi
, arg0_base
,
878 0, integer_one_node
, base_type
,
881 /* Add the candidate to the statement-candidate mapping. */
882 add_cand_for_stmt (phi
, c
);
885 /* Given PBASE which is a pointer to tree, look up the defining
   statement for it and check whether the candidate is in the
   form of:

     X = B + (1 * S), S is integer constant
     X = B + (i * S), S is integer one

   If so, set PBASE to the candidate's base_expr and return the
   widest_int (i * S).  Otherwise, just return a widest_int of zero.  */
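/* Illustrative examples: if *PBASE is defined by a CAND_ADD recorded as
   X = B + (1 * 12), *PBASE is reset to B and 12 is returned; if it is
   recorded as X = B + (5 * 1), *PBASE is reset to B and 5 is returned.
   In all other cases zero is returned and *PBASE is left untouched.  */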
897 backtrace_base_for_ref (tree
*pbase
)
899 tree base_in
= *pbase
;
900 slsr_cand_t base_cand
;
902 STRIP_NOPS (base_in
);
904 /* Strip off widening conversion(s) to handle cases where
905 e.g. 'B' is widened from an 'int' in order to calculate
    a pointer offset.  */
907 if (CONVERT_EXPR_P (base_in
)
908 && legal_cast_p_1 (TREE_TYPE (base_in
),
909 TREE_TYPE (TREE_OPERAND (base_in
, 0))))
910 base_in
= get_unwidened (base_in
, NULL_TREE
);
912 if (TREE_CODE (base_in
) != SSA_NAME
)
915 base_cand
= base_cand_from_table (base_in
);
917 while (base_cand
&& base_cand
->kind
!= CAND_PHI
)
919 if (base_cand
->kind
== CAND_ADD
920 && base_cand
->index
== 1
921 && TREE_CODE (base_cand
->stride
) == INTEGER_CST
)
923 /* X = B + (1 * S), S is integer constant. */
924 *pbase
= base_cand
->base_expr
;
925 return wi::to_widest (base_cand
->stride
);
927 else if (base_cand
->kind
== CAND_ADD
928 && TREE_CODE (base_cand
->stride
) == INTEGER_CST
929 && integer_onep (base_cand
->stride
))
931 /* X = B + (i * S), S is integer one. */
932 *pbase
= base_cand
->base_expr
;
933 return base_cand
->index
;
936 base_cand
= lookup_cand (base_cand
->next_interp
);
942 /* Look for the following pattern:
     *PBASE:    MEM_REF (T1, C1)

     *POFFSET:  MULT_EXPR (T2, C3)  [C2 is zero]
                or
                MULT_EXPR (PLUS_EXPR (T2, C2), C3)
                or
                MULT_EXPR (MINUS_EXPR (T2, -C2), C3)

     *PINDEX:   C4 * BITS_PER_UNIT

   If not present, leave the input values unchanged and return FALSE.
   Otherwise, modify the input values as follows and return TRUE:

     *PBASE:    T1
     *POFFSET:  MULT_EXPR (T2, C3)
     *PINDEX:   C1 + (C2 * C3) + C4

   When T2 is recorded by a CAND_ADD in the form of (T2' + C5), it
   will be further restructured to:

     *PBASE:    T1
     *POFFSET:  MULT_EXPR (T2', C3)
     *PINDEX:   C1 + (C2 * C3) + C4 + (C5 * C3)  */
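/* A worked instance (invented values): for *PBASE = MEM_REF (p_1, 8),
   *POFFSET = (n_2 + 2) * 4 and *PINDEX = 0, we have C1 = 8, C2 = 2,
   C3 = 4 and C4 = 0, so the outputs become *PBASE = p_1,
   *POFFSET = MULT_EXPR (n_2, 4) and *PINDEX = 8 + (2 * 4) + 0 = 16,
   assuming backtrace_base_for_ref finds nothing further (C5 = 0).  */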
969 restructure_reference (tree
*pbase
, tree
*poffset
, widest_int
*pindex
,
972 tree base
= *pbase
, offset
= *poffset
;
973 widest_int index
= *pindex
;
974 tree mult_op0
, t1
, t2
, type
;
975 widest_int c1
, c2
, c3
, c4
, c5
;
976 offset_int mem_offset
;
980 || TREE_CODE (base
) != MEM_REF
981 || !mem_ref_offset (base
).is_constant (&mem_offset
)
982 || TREE_CODE (offset
) != MULT_EXPR
983 || TREE_CODE (TREE_OPERAND (offset
, 1)) != INTEGER_CST
984 || wi::umod_floor (index
, BITS_PER_UNIT
) != 0)
987 t1
= TREE_OPERAND (base
, 0);
988 c1
= widest_int::from (mem_offset
, SIGNED
);
989 type
= TREE_TYPE (TREE_OPERAND (base
, 1));
991 mult_op0
= TREE_OPERAND (offset
, 0);
992 c3
= wi::to_widest (TREE_OPERAND (offset
, 1));
994 if (TREE_CODE (mult_op0
) == PLUS_EXPR
)
996 if (TREE_CODE (TREE_OPERAND (mult_op0
, 1)) == INTEGER_CST
)
998 t2
= TREE_OPERAND (mult_op0
, 0);
999 c2
= wi::to_widest (TREE_OPERAND (mult_op0
, 1));
1004 else if (TREE_CODE (mult_op0
) == MINUS_EXPR
)
1006 if (TREE_CODE (TREE_OPERAND (mult_op0
, 1)) == INTEGER_CST
)
1008 t2
= TREE_OPERAND (mult_op0
, 0);
1009 c2
= -wi::to_widest (TREE_OPERAND (mult_op0
, 1));
1020 c4
= index
>> LOG2_BITS_PER_UNIT
;
1021 c5
= backtrace_base_for_ref (&t2
);
1024 *poffset
= fold_build2 (MULT_EXPR
, sizetype
, fold_convert (sizetype
, t2
),
1025 wide_int_to_tree (sizetype
, c3
));
1026 *pindex
= c1
+ c2
* c3
+ c4
+ c5
* c3
;
1032 /* Given GS which contains a data reference, create a CAND_REF entry in
1033 the candidate table and attempt to find a basis. */
1036 slsr_process_ref (gimple
*gs
)
1038 tree ref_expr
, base
, offset
, type
;
1039 poly_int64 bitsize
, bitpos
;
1041 int unsignedp
, reversep
, volatilep
;
1044 if (gimple_vdef (gs
))
1045 ref_expr
= gimple_assign_lhs (gs
);
1047 ref_expr
= gimple_assign_rhs1 (gs
);
1049 if (!handled_component_p (ref_expr
)
1050 || TREE_CODE (ref_expr
) == BIT_FIELD_REF
1051 || (TREE_CODE (ref_expr
) == COMPONENT_REF
1052 && DECL_BIT_FIELD (TREE_OPERAND (ref_expr
, 1))))
1055 base
= get_inner_reference (ref_expr
, &bitsize
, &bitpos
, &offset
, &mode
,
1056 &unsignedp
, &reversep
, &volatilep
);
1057 HOST_WIDE_INT cbitpos
;
1058 if (reversep
|| !bitpos
.is_constant (&cbitpos
))
1060 widest_int index
= cbitpos
;
1062 if (!restructure_reference (&base
, &offset
, &index
, &type
))
1065 c
= alloc_cand_and_find_basis (CAND_REF
, gs
, base
, index
, offset
,
1068 /* Add the candidate to the statement-candidate mapping. */
1069 add_cand_for_stmt (gs
, c
);
1072 /* Create a candidate entry for a statement GS, where GS multiplies
1073 two SSA names BASE_IN and STRIDE_IN. Propagate any known information
1074 about the two SSA names into the new candidate.  Return the new
     candidate.  */
1078 create_mul_ssa_cand (gimple
*gs
, tree base_in
, tree stride_in
, bool speed
)
1080 tree base
= NULL_TREE
, stride
= NULL_TREE
, ctype
= NULL_TREE
;
1081 tree stype
= NULL_TREE
;
1083 unsigned savings
= 0;
1085 slsr_cand_t base_cand
= base_cand_from_table (base_in
);
1087 /* Look at all interpretations of the base candidate, if necessary,
1088 to find information to propagate into this candidate. */
1089 while (base_cand
&& !base
&& base_cand
->kind
!= CAND_PHI
)
1092 if (base_cand
->kind
== CAND_MULT
&& integer_onep (base_cand
->stride
))
1098 base
= base_cand
->base_expr
;
1099 index
= base_cand
->index
;
1101 ctype
= base_cand
->cand_type
;
1102 stype
= TREE_TYPE (stride_in
);
1103 if (has_single_use (base_in
))
1104 savings
= (base_cand
->dead_savings
1105 + stmt_cost (base_cand
->cand_stmt
, speed
));
1107 else if (base_cand
->kind
== CAND_ADD
1108 && TREE_CODE (base_cand
->stride
) == INTEGER_CST
)
1110 /* Y = B + (i' * S), S constant
1112 ============================
1113 X = B + ((i' * S) * Z) */
1114 base
= base_cand
->base_expr
;
1115 index
= base_cand
->index
* wi::to_widest (base_cand
->stride
);
1117 ctype
= base_cand
->cand_type
;
1118 stype
= TREE_TYPE (stride_in
);
1119 if (has_single_use (base_in
))
1120 savings
= (base_cand
->dead_savings
1121 + stmt_cost (base_cand
->cand_stmt
, speed
));
1124 base_cand
= lookup_cand (base_cand
->next_interp
);
1129 /* No interpretations had anything useful to propagate, so
1130 produce X = (Y + 0) * Z. */
1134 ctype
= TREE_TYPE (base_in
);
1135 stype
= TREE_TYPE (stride_in
);
1138 c
= alloc_cand_and_find_basis (CAND_MULT
, gs
, base
, index
, stride
,
1139 ctype
, stype
, savings
);
1143 /* Create a candidate entry for a statement GS, where GS multiplies
1144 SSA name BASE_IN by constant STRIDE_IN. Propagate any known
1145 information about BASE_IN into the new candidate.  Return the new
     candidate.  */
1149 create_mul_imm_cand (gimple
*gs
, tree base_in
, tree stride_in
, bool speed
)
1151 tree base
= NULL_TREE
, stride
= NULL_TREE
, ctype
= NULL_TREE
;
1152 widest_int index
, temp
;
1153 unsigned savings
= 0;
1155 slsr_cand_t base_cand
= base_cand_from_table (base_in
);
1157 /* Look at all interpretations of the base candidate, if necessary,
1158 to find information to propagate into this candidate. */
1159 while (base_cand
&& !base
&& base_cand
->kind
!= CAND_PHI
)
1161 if (base_cand
->kind
== CAND_MULT
1162 && TREE_CODE (base_cand
->stride
) == INTEGER_CST
)
1164 /* Y = (B + i') * S, S constant
1166 ============================
1167 X = (B + i') * (S * c) */
1168 temp
= wi::to_widest (base_cand
->stride
) * wi::to_widest (stride_in
);
1169 if (wi::fits_to_tree_p (temp
, TREE_TYPE (stride_in
)))
1171 base
= base_cand
->base_expr
;
1172 index
= base_cand
->index
;
1173 stride
= wide_int_to_tree (TREE_TYPE (stride_in
), temp
);
1174 ctype
= base_cand
->cand_type
;
1175 if (has_single_use (base_in
))
1176 savings
= (base_cand
->dead_savings
1177 + stmt_cost (base_cand
->cand_stmt
, speed
));
1180 else if (base_cand
->kind
== CAND_ADD
&& integer_onep (base_cand
->stride
))
1184 ===========================
1186 base
= base_cand
->base_expr
;
1187 index
= base_cand
->index
;
1189 ctype
= base_cand
->cand_type
;
1190 if (has_single_use (base_in
))
1191 savings
= (base_cand
->dead_savings
1192 + stmt_cost (base_cand
->cand_stmt
, speed
));
1194 else if (base_cand
->kind
== CAND_ADD
1195 && base_cand
->index
== 1
1196 && TREE_CODE (base_cand
->stride
) == INTEGER_CST
)
1198 /* Y = B + (1 * S), S constant
1200 ===========================
1202 base
= base_cand
->base_expr
;
1203 index
= wi::to_widest (base_cand
->stride
);
1205 ctype
= base_cand
->cand_type
;
1206 if (has_single_use (base_in
))
1207 savings
= (base_cand
->dead_savings
1208 + stmt_cost (base_cand
->cand_stmt
, speed
));
1211 base_cand
= lookup_cand (base_cand
->next_interp
);
1216 /* No interpretations had anything useful to propagate, so
1217 produce X = (Y + 0) * c. */
1221 ctype
= TREE_TYPE (base_in
);
1224 c
= alloc_cand_and_find_basis (CAND_MULT
, gs
, base
, index
, stride
,
1225 ctype
, sizetype
, savings
);
1229 /* Given GS which is a multiply of scalar integers, make an appropriate
1230 entry in the candidate table. If this is a multiply of two SSA names,
1231 create two CAND_MULT interpretations and attempt to find a basis for
1232 each of them.  Otherwise, create a single CAND_MULT and attempt to
     find a basis for it.  */
1236 slsr_process_mul (gimple
*gs
, tree rhs1
, tree rhs2
, bool speed
)
1240 /* If this is a multiply of an SSA name with itself, it is highly
1241 unlikely that we will get a strength reduction opportunity, so
1242 don't record it as a candidate. This simplifies the logic for
1243 finding a basis, so if this is removed that must be considered. */
1247 if (TREE_CODE (rhs2
) == SSA_NAME
)
1249 /* Record an interpretation of this statement in the candidate table
1250 assuming RHS1 is the base expression and RHS2 is the stride. */
1251 c
= create_mul_ssa_cand (gs
, rhs1
, rhs2
, speed
);
1253 /* Add the first interpretation to the statement-candidate mapping. */
1254 add_cand_for_stmt (gs
, c
);
1256 /* Record another interpretation of this statement assuming RHS1
1257 is the stride and RHS2 is the base expression. */
1258 c2
= create_mul_ssa_cand (gs
, rhs2
, rhs1
, speed
);
1259 c
->next_interp
= c2
->cand_num
;
1260 c2
->first_interp
= c
->cand_num
;
1262 else if (TREE_CODE (rhs2
) == INTEGER_CST
&& !integer_zerop (rhs2
))
1264 /* Record an interpretation for the multiply-immediate. */
1265 c
= create_mul_imm_cand (gs
, rhs1
, rhs2
, speed
);
1267 /* Add the interpretation to the statement-candidate mapping. */
1268 add_cand_for_stmt (gs
, c
);
1272 /* Create a candidate entry for a statement GS, where GS adds two
1273 SSA names BASE_IN and ADDEND_IN if SUBTRACT_P is false, and
1274 subtracts ADDEND_IN from BASE_IN otherwise. Propagate any known
1275 information about the two SSA names into the new candidate.
1276 Return the new candidate. */
1279 create_add_ssa_cand (gimple
*gs
, tree base_in
, tree addend_in
,
1280 bool subtract_p
, bool speed
)
1282 tree base
= NULL_TREE
, stride
= NULL_TREE
, ctype
= NULL_TREE
;
1283 tree stype
= NULL_TREE
;
1285 unsigned savings
= 0;
1287 slsr_cand_t base_cand
= base_cand_from_table (base_in
);
1288 slsr_cand_t addend_cand
= base_cand_from_table (addend_in
);
1290 /* The most useful transformation is a multiply-immediate feeding
1291 an add or subtract. Look for that first. */
1292 while (addend_cand
&& !base
&& addend_cand
->kind
!= CAND_PHI
)
1294 if (addend_cand
->kind
== CAND_MULT
1295 && addend_cand
->index
== 0
1296 && TREE_CODE (addend_cand
->stride
) == INTEGER_CST
)
1298 /* Z = (B + 0) * S, S constant
1300 ===========================
1301 X = Y + ((+/-1 * S) * B) */
1303 index
= wi::to_widest (addend_cand
->stride
);
1306 stride
= addend_cand
->base_expr
;
1307 ctype
= TREE_TYPE (base_in
);
1308 stype
= addend_cand
->cand_type
;
1309 if (has_single_use (addend_in
))
1310 savings
= (addend_cand
->dead_savings
1311 + stmt_cost (addend_cand
->cand_stmt
, speed
));
1314 addend_cand
= lookup_cand (addend_cand
->next_interp
);
1317 while (base_cand
&& !base
&& base_cand
->kind
!= CAND_PHI
)
1319 if (base_cand
->kind
== CAND_ADD
1320 && (base_cand
->index
== 0
1321 || operand_equal_p (base_cand
->stride
,
1322 integer_zero_node
, 0)))
1324 /* Y = B + (i' * S), i' * S = 0
1326 ============================
1327 X = B + (+/-1 * Z) */
1328 base
= base_cand
->base_expr
;
1329 index
= subtract_p
? -1 : 1;
1331 ctype
= base_cand
->cand_type
;
1332 stype
= (TREE_CODE (addend_in
) == INTEGER_CST
? sizetype
1333 : TREE_TYPE (addend_in
));
1334 if (has_single_use (base_in
))
1335 savings
= (base_cand
->dead_savings
1336 + stmt_cost (base_cand
->cand_stmt
, speed
));
1338 else if (subtract_p
)
1340 slsr_cand_t subtrahend_cand
= base_cand_from_table (addend_in
);
1342 while (subtrahend_cand
&& !base
&& subtrahend_cand
->kind
!= CAND_PHI
)
1344 if (subtrahend_cand
->kind
== CAND_MULT
1345 && subtrahend_cand
->index
== 0
1346 && TREE_CODE (subtrahend_cand
->stride
) == INTEGER_CST
)
1348 /* Z = (B + 0) * S, S constant
1350 ===========================
1351 Value: X = Y + ((-1 * S) * B) */
1353 index
= wi::to_widest (subtrahend_cand
->stride
);
1355 stride
= subtrahend_cand
->base_expr
;
1356 ctype
= TREE_TYPE (base_in
);
1357 stype
= subtrahend_cand
->cand_type
;
1358 if (has_single_use (addend_in
))
1359 savings
= (subtrahend_cand
->dead_savings
1360 + stmt_cost (subtrahend_cand
->cand_stmt
, speed
));
1363 subtrahend_cand
= lookup_cand (subtrahend_cand
->next_interp
);
1367 base_cand
= lookup_cand (base_cand
->next_interp
);
1372 /* No interpretations had anything useful to propagate, so
1373 produce X = Y + (1 * Z). */
1375 index
= subtract_p
? -1 : 1;
1377 ctype
= TREE_TYPE (base_in
);
1378 stype
= (TREE_CODE (addend_in
) == INTEGER_CST
? sizetype
1379 : TREE_TYPE (addend_in
));
1382 c
= alloc_cand_and_find_basis (CAND_ADD
, gs
, base
, index
, stride
,
1383 ctype
, stype
, savings
);
1387 /* Create a candidate entry for a statement GS, where GS adds SSA
1388 name BASE_IN to constant INDEX_IN. Propagate any known information
1389 about BASE_IN into the new candidate. Return the new candidate. */
1392 create_add_imm_cand (gimple
*gs
, tree base_in
, const widest_int
&index_in
,
1395 enum cand_kind kind
= CAND_ADD
;
1396 tree base
= NULL_TREE
, stride
= NULL_TREE
, ctype
= NULL_TREE
;
1397 tree stype
= NULL_TREE
;
1398 widest_int index
, multiple
;
1399 unsigned savings
= 0;
1401 slsr_cand_t base_cand
= base_cand_from_table (base_in
);
1403 while (base_cand
&& !base
&& base_cand
->kind
!= CAND_PHI
)
1405 signop sign
= TYPE_SIGN (TREE_TYPE (base_cand
->stride
));
1407 if (TREE_CODE (base_cand
->stride
) == INTEGER_CST
1408 && wi::multiple_of_p (index_in
, wi::to_widest (base_cand
->stride
),
   /* Y = (B + i') * S, S constant, c = kS for some integer k
      X = Y + c
      ============================
      X = (B + (i' + k)) * S

      Y = B + (i' * S), S constant, c = kS for some integer k
      X = Y + c
      ============================
      X = B + ((i' + k) * S)  */
1420 kind
= base_cand
->kind
;
1421 base
= base_cand
->base_expr
;
1422 index
= base_cand
->index
+ multiple
;
1423 stride
= base_cand
->stride
;
1424 ctype
= base_cand
->cand_type
;
1425 stype
= base_cand
->stride_type
;
1426 if (has_single_use (base_in
))
1427 savings
= (base_cand
->dead_savings
1428 + stmt_cost (base_cand
->cand_stmt
, speed
));
1431 base_cand
= lookup_cand (base_cand
->next_interp
);
1436 /* No interpretations had anything useful to propagate, so
1437 produce X = Y + (c * 1). */
1441 stride
= integer_one_node
;
1442 ctype
= TREE_TYPE (base_in
);
1446 c
= alloc_cand_and_find_basis (kind
, gs
, base
, index
, stride
,
1447 ctype
, stype
, savings
);
1451 /* Given GS which is an add or subtract of scalar integers or pointers,
1452 make at least one appropriate entry in the candidate table. */
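/* For example (illustrative), "z_1 = x_2 + y_3" with two SSA operands is
   given two interpretations, one with base x_2 and stride y_3 and one
   with base y_3 and stride x_2, whereas "z_1 = x_2 - y_3" is given only
   the first, since the subtracted operand cannot serve as the base.  */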
1455 slsr_process_add (gimple
*gs
, tree rhs1
, tree rhs2
, bool speed
)
1457 bool subtract_p
= gimple_assign_rhs_code (gs
) == MINUS_EXPR
;
1458 slsr_cand_t c
= NULL
, c2
;
1460 if (TREE_CODE (rhs2
) == SSA_NAME
)
1462 /* First record an interpretation assuming RHS1 is the base expression
1463 and RHS2 is the stride. But it doesn't make sense for the
1464 stride to be a pointer, so don't record a candidate in that case. */
1465 if (!POINTER_TYPE_P (TREE_TYPE (rhs2
)))
1467 c
= create_add_ssa_cand (gs
, rhs1
, rhs2
, subtract_p
, speed
);
1469 /* Add the first interpretation to the statement-candidate
1471 add_cand_for_stmt (gs
, c
);
1474 /* If the two RHS operands are identical, or this is a subtract,
     we're done.  */
1476 if (operand_equal_p (rhs1
, rhs2
, 0) || subtract_p
)
1479 /* Otherwise, record another interpretation assuming RHS2 is the
1480 base expression and RHS1 is the stride, again provided that the
1481 stride is not a pointer. */
1482 if (!POINTER_TYPE_P (TREE_TYPE (rhs1
)))
1484 c2
= create_add_ssa_cand (gs
, rhs2
, rhs1
, false, speed
);
1487 c
->next_interp
= c2
->cand_num
;
1488 c2
->first_interp
= c
->cand_num
;
1491 add_cand_for_stmt (gs
, c2
);
1494 else if (TREE_CODE (rhs2
) == INTEGER_CST
)
1496 /* Record an interpretation for the add-immediate. */
1497 widest_int index
= wi::to_widest (rhs2
);
1501 c
= create_add_imm_cand (gs
, rhs1
, index
, speed
);
1503 /* Add the interpretation to the statement-candidate mapping. */
1504 add_cand_for_stmt (gs
, c
);
1508 /* Given GS which is a negate of a scalar integer, make an appropriate
1509 entry in the candidate table.  A negate is equivalent to a multiply
     by -1.  */
1513 slsr_process_neg (gimple
*gs
, tree rhs1
, bool speed
)
1515 /* Record a CAND_MULT interpretation for the multiply by -1. */
1516 slsr_cand_t c
= create_mul_imm_cand (gs
, rhs1
, integer_minus_one_node
, speed
);
1518 /* Add the interpretation to the statement-candidate mapping. */
1519 add_cand_for_stmt (gs
, c
);
1522 /* Help function for legal_cast_p, operating on two trees. Checks
1523 whether it's allowable to cast from RHS to LHS. See legal_cast_p
1524 for more details. */
1527 legal_cast_p_1 (tree lhs_type
, tree rhs_type
)
1529 unsigned lhs_size
, rhs_size
;
1530 bool lhs_wraps
, rhs_wraps
;
1532 lhs_size
= TYPE_PRECISION (lhs_type
);
1533 rhs_size
= TYPE_PRECISION (rhs_type
);
1534 lhs_wraps
= ANY_INTEGRAL_TYPE_P (lhs_type
) && TYPE_OVERFLOW_WRAPS (lhs_type
);
1535 rhs_wraps
= ANY_INTEGRAL_TYPE_P (rhs_type
) && TYPE_OVERFLOW_WRAPS (rhs_type
);
1537 if (lhs_size
< rhs_size
1538 || (rhs_wraps
&& !lhs_wraps
)
1539 || (rhs_wraps
&& lhs_wraps
&& rhs_size
!= lhs_size
))
1545 /* Return TRUE if GS is a statement that defines an SSA name from
1546 a conversion and is legal for us to combine with an add and multiply
   in the candidate table.  For example, suppose we have:

     A = B + i;  C = (type) A;  D = C * S;

   Without the type-cast, we would create a CAND_MULT for D with base B,
   index i, and stride S.  We want to record this candidate only if it
   is equivalent to apply the type cast following the multiply:

     A = B + i;  E = A * S;  D = (type) E;

   We will record the type with the candidate for D.  This allows us
   to use a similar previous candidate as a basis.  If we have earlier
   seen

     A' = B + i';  C' = (type) A';  D' = C' * S;

   we can replace D with

     D = D' + (i - i') * S;
1572 But if moving the type-cast would change semantics, we mustn't do this.
1574 This is legitimate for casts from a non-wrapping integral type to
1575 any integral type of the same or larger size. It is not legitimate
1576 to convert a wrapping type to a non-wrapping type, or to a wrapping
1577 type of a different size. I.e., with a wrapping type, we must
1578 assume that the addition B + i could wrap, in which case performing
1579 the multiply before or after one of the "illegal" type casts will
1580 have different semantics. */
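/* Two hedged examples for a typical LP64 target: given

     int  a = b + 4;
     long c = (long) a;
     long d = c * s;

   the conversion from non-wrapping 32-bit int to the wider long is
   legal, so d can be recorded as a CAND_MULT with base b, index 4, and
   stride s.  If a were declared "unsigned int" instead, the 32-bit
   addition could wrap, the cast to a wider non-wrapping type would be
   rejected, and no combined candidate would be recorded.  */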
1583 legal_cast_p (gimple
*gs
, tree rhs
)
1585 if (!is_gimple_assign (gs
)
1586 || !CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (gs
)))
1589 return legal_cast_p_1 (TREE_TYPE (gimple_assign_lhs (gs
)), TREE_TYPE (rhs
));
1592 /* Given GS which is a cast to a scalar integer type, determine whether
1593 the cast is legal for strength reduction. If so, make at least one
1594 appropriate entry in the candidate table. */
1597 slsr_process_cast (gimple
*gs
, tree rhs1
, bool speed
)
1600 slsr_cand_t base_cand
, c
= NULL
, c2
;
1601 unsigned savings
= 0;
1603 if (!legal_cast_p (gs
, rhs1
))
1606 lhs
= gimple_assign_lhs (gs
);
1607 base_cand
= base_cand_from_table (rhs1
);
1608 ctype
= TREE_TYPE (lhs
);
1610 if (base_cand
&& base_cand
->kind
!= CAND_PHI
)
1612 slsr_cand_t first_cand
= NULL
;
1616 /* Propagate all data from the base candidate except the type,
1617 which comes from the cast, and the base candidate's cast,
1618 which is no longer applicable. */
1619 if (has_single_use (rhs1
))
1620 savings
= (base_cand
->dead_savings
1621 + stmt_cost (base_cand
->cand_stmt
, speed
));
1623 c
= alloc_cand_and_find_basis (base_cand
->kind
, gs
,
1624 base_cand
->base_expr
,
1625 base_cand
->index
, base_cand
->stride
,
1626 ctype
, base_cand
->stride_type
,
1631 if (first_cand
!= c
)
1632 c
->first_interp
= first_cand
->cand_num
;
1634 base_cand
= lookup_cand (base_cand
->next_interp
);
1639 /* If nothing is known about the RHS, create fresh CAND_ADD and
   CAND_MULT interpretations:

     X = Y + (0 * 1)
     X = (Y + 0) * 1

   The first of these is somewhat arbitrary, but the choice of
   1 for the stride simplifies the logic for propagating casts
   into their uses.  */
1648 c
= alloc_cand_and_find_basis (CAND_ADD
, gs
, rhs1
, 0,
1649 integer_one_node
, ctype
, sizetype
, 0);
1650 c2
= alloc_cand_and_find_basis (CAND_MULT
, gs
, rhs1
, 0,
1651 integer_one_node
, ctype
, sizetype
, 0);
1652 c
->next_interp
= c2
->cand_num
;
1653 c2
->first_interp
= c
->cand_num
;
1656 /* Add the first (or only) interpretation to the statement-candidate
1658 add_cand_for_stmt (gs
, c
);
1661 /* Given GS which is a copy of a scalar integer type, make at least one
1662 appropriate entry in the candidate table.
1664 This interface is included for completeness, but is unnecessary
1665 if this pass immediately follows a pass that performs copy
1666 propagation, such as DOM. */
1669 slsr_process_copy (gimple
*gs
, tree rhs1
, bool speed
)
1671 slsr_cand_t base_cand
, c
= NULL
, c2
;
1672 unsigned savings
= 0;
1674 base_cand
= base_cand_from_table (rhs1
);
1676 if (base_cand
&& base_cand
->kind
!= CAND_PHI
)
1678 slsr_cand_t first_cand
= NULL
;
1682 /* Propagate all data from the base candidate. */
1683 if (has_single_use (rhs1
))
1684 savings
= (base_cand
->dead_savings
1685 + stmt_cost (base_cand
->cand_stmt
, speed
));
1687 c
= alloc_cand_and_find_basis (base_cand
->kind
, gs
,
1688 base_cand
->base_expr
,
1689 base_cand
->index
, base_cand
->stride
,
1690 base_cand
->cand_type
,
1691 base_cand
->stride_type
, savings
);
1695 if (first_cand
!= c
)
1696 c
->first_interp
= first_cand
->cand_num
;
1698 base_cand
= lookup_cand (base_cand
->next_interp
);
1703 /* If nothing is known about the RHS, create fresh CAND_ADD and
   CAND_MULT interpretations:

     X = Y + (0 * 1)
     X = (Y + 0) * 1

   The first of these is somewhat arbitrary, but the choice of
   1 for the stride simplifies the logic for propagating casts
   into their uses.  */
1712 c
= alloc_cand_and_find_basis (CAND_ADD
, gs
, rhs1
, 0,
1713 integer_one_node
, TREE_TYPE (rhs1
),
1715 c2
= alloc_cand_and_find_basis (CAND_MULT
, gs
, rhs1
, 0,
1716 integer_one_node
, TREE_TYPE (rhs1
),
1718 c
->next_interp
= c2
->cand_num
;
1719 c2
->first_interp
= c
->cand_num
;
1722 /* Add the first (or only) interpretation to the statement-candidate
1724 add_cand_for_stmt (gs
, c
);
1727 class find_candidates_dom_walker
: public dom_walker
1730 find_candidates_dom_walker (cdi_direction direction
)
1731 : dom_walker (direction
) {}
1732 virtual edge
before_dom_children (basic_block
);
1735 /* Find strength-reduction candidates in block BB. */
1738 find_candidates_dom_walker::before_dom_children (basic_block bb
)
1740 bool speed
= optimize_bb_for_speed_p (bb
);
1742 for (gphi_iterator gsi
= gsi_start_phis (bb
); !gsi_end_p (gsi
);
1744 slsr_process_phi (gsi
.phi (), speed
);
1746 for (gimple_stmt_iterator gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
);
1749 gimple
*gs
= gsi_stmt (gsi
);
1751 if (stmt_could_throw_p (cfun
, gs
))
1754 if (gimple_vuse (gs
) && gimple_assign_single_p (gs
))
1755 slsr_process_ref (gs
);
1757 else if (is_gimple_assign (gs
)
1758 && (INTEGRAL_TYPE_P (TREE_TYPE (gimple_assign_lhs (gs
)))
1759 || POINTER_TYPE_P (TREE_TYPE (gimple_assign_lhs (gs
)))))
1761 tree rhs1
= NULL_TREE
, rhs2
= NULL_TREE
;
1763 switch (gimple_assign_rhs_code (gs
))
1767 rhs1
= gimple_assign_rhs1 (gs
);
1768 rhs2
= gimple_assign_rhs2 (gs
);
1769 /* Should never happen, but currently some buggy situations
1770 in earlier phases put constants in rhs1. */
1771 if (TREE_CODE (rhs1
) != SSA_NAME
)
1775 /* Possible future opportunity: rhs1 of a ptr+ can be
1777 case POINTER_PLUS_EXPR
:
1779 rhs2
= gimple_assign_rhs2 (gs
);
1785 rhs1
= gimple_assign_rhs1 (gs
);
1786 if (TREE_CODE (rhs1
) != SSA_NAME
)
1794 switch (gimple_assign_rhs_code (gs
))
1797 slsr_process_mul (gs
, rhs1
, rhs2
, speed
);
1801 case POINTER_PLUS_EXPR
:
1803 slsr_process_add (gs
, rhs1
, rhs2
, speed
);
1807 slsr_process_neg (gs
, rhs1
, speed
);
1811 slsr_process_cast (gs
, rhs1
, speed
);
1815 slsr_process_copy (gs
, rhs1
, speed
);
1826 /* Dump a candidate for debug. */
1829 dump_candidate (slsr_cand_t c
)
1831 fprintf (dump_file
, "%3d [%d] ", c
->cand_num
,
1832 gimple_bb (c
->cand_stmt
)->index
);
1833 print_gimple_stmt (dump_file
, c
->cand_stmt
, 0);
1837 fputs (" MULT : (", dump_file
);
1838 print_generic_expr (dump_file
, c
->base_expr
);
1839 fputs (" + ", dump_file
);
1840 print_decs (c
->index
, dump_file
);
1841 fputs (") * ", dump_file
);
1842 if (TREE_CODE (c
->stride
) != INTEGER_CST
1843 && c
->stride_type
!= TREE_TYPE (c
->stride
))
1845 fputs ("(", dump_file
);
1846 print_generic_expr (dump_file
, c
->stride_type
);
1847 fputs (")", dump_file
);
1849 print_generic_expr (dump_file
, c
->stride
);
1850 fputs (" : ", dump_file
);
1853 fputs (" ADD : ", dump_file
);
1854 print_generic_expr (dump_file
, c
->base_expr
);
1855 fputs (" + (", dump_file
);
1856 print_decs (c
->index
, dump_file
);
1857 fputs (" * ", dump_file
);
1858 if (TREE_CODE (c
->stride
) != INTEGER_CST
1859 && c
->stride_type
!= TREE_TYPE (c
->stride
))
1861 fputs ("(", dump_file
);
1862 print_generic_expr (dump_file
, c
->stride_type
);
1863 fputs (")", dump_file
);
1865 print_generic_expr (dump_file
, c
->stride
);
1866 fputs (") : ", dump_file
);
1869 fputs (" REF : ", dump_file
);
1870 print_generic_expr (dump_file
, c
->base_expr
);
1871 fputs (" + (", dump_file
);
1872 print_generic_expr (dump_file
, c
->stride
);
1873 fputs (") + ", dump_file
);
1874 print_decs (c
->index
, dump_file
);
1875 fputs (" : ", dump_file
);
1878 fputs (" PHI : ", dump_file
);
1879 print_generic_expr (dump_file
, c
->base_expr
);
1880 fputs (" + (unknown * ", dump_file
);
1881 print_generic_expr (dump_file
, c
->stride
);
1882 fputs (") : ", dump_file
);
1887 print_generic_expr (dump_file
, c
->cand_type
);
1888 fprintf (dump_file
, "\n basis: %d dependent: %d sibling: %d\n",
1889 c
->basis
, c
->dependent
, c
->sibling
);
1891 " next-interp: %d first-interp: %d dead-savings: %d\n",
1892 c
->next_interp
, c
->first_interp
, c
->dead_savings
);
1894 fprintf (dump_file
, " phi: %d\n", c
->def_phi
);
1895 fputs ("\n", dump_file
);
1898 /* Dump the candidate vector for debug. */
1901 dump_cand_vec (void)
1906 fprintf (dump_file
, "\nStrength reduction candidate vector:\n\n");
1908 FOR_EACH_VEC_ELT (cand_vec
, i
, c
)
1913 /* Callback used to dump the candidate chains hash table. */
1916 ssa_base_cand_dump_callback (cand_chain
**slot
, void *ignored ATTRIBUTE_UNUSED
)
1918 const_cand_chain_t chain
= *slot
;
1921 print_generic_expr (dump_file
, chain
->base_expr
);
1922 fprintf (dump_file
, " -> %d", chain
->cand
->cand_num
);
1924 for (p
= chain
->next
; p
; p
= p
->next
)
1925 fprintf (dump_file
, " -> %d", p
->cand
->cand_num
);
1927 fputs ("\n", dump_file
);
1931 /* Dump the candidate chains. */
1934 dump_cand_chains (void)
1936 fprintf (dump_file
, "\nStrength reduction candidate chains:\n\n");
1937 base_cand_map
->traverse_noresize
<void *, ssa_base_cand_dump_callback
>
1939 fputs ("\n", dump_file
);
1942 /* Dump the increment vector for debug. */
1945 dump_incr_vec (void)
1947 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1951 fprintf (dump_file
, "\nIncrement vector:\n\n");
1953 for (i
= 0; i
< incr_vec_len
; i
++)
1955 fprintf (dump_file
, "%3d increment: ", i
);
1956 print_decs (incr_vec
[i
].incr
, dump_file
);
1957 fprintf (dump_file
, "\n count: %d", incr_vec
[i
].count
);
1958 fprintf (dump_file
, "\n cost: %d", incr_vec
[i
].cost
);
1959 fputs ("\n initializer: ", dump_file
);
1960 print_generic_expr (dump_file
, incr_vec
[i
].initializer
);
1961 fputs ("\n\n", dump_file
);
1966 /* Replace *EXPR in candidate C with an equivalent strength-reduced
     data reference.  */
1970 replace_ref (tree
*expr
, slsr_cand_t c
)
1972 tree add_expr
, mem_ref
, acc_type
= TREE_TYPE (*expr
);
1973 unsigned HOST_WIDE_INT misalign
;
1976 /* Ensure the memory reference carries the minimum alignment
1977 requirement for the data type. See PR58041. */
1978 get_object_alignment_1 (*expr
, &align
, &misalign
);
1980 align
= least_bit_hwi (misalign
);
1981 if (align
< TYPE_ALIGN (acc_type
))
1982 acc_type
= build_aligned_type (acc_type
, align
);
1984 add_expr
= fold_build2 (POINTER_PLUS_EXPR
, c
->cand_type
,
1985 c
->base_expr
, c
->stride
);
1986 mem_ref
= fold_build2 (MEM_REF
, acc_type
, add_expr
,
1987 wide_int_to_tree (c
->cand_type
, c
->index
));
1989 /* Gimplify the base addressing expression for the new MEM_REF tree. */
1990 gimple_stmt_iterator gsi
= gsi_for_stmt (c
->cand_stmt
);
1991 TREE_OPERAND (mem_ref
, 0)
1992 = force_gimple_operand_gsi (&gsi
, TREE_OPERAND (mem_ref
, 0),
1993 /*simple_p=*/true, NULL
,
1994 /*before=*/true, GSI_SAME_STMT
);
1995 copy_ref_info (mem_ref
, *expr
);
1997 update_stmt (c
->cand_stmt
);
2000 /* Replace CAND_REF candidate C, each sibling of candidate C, and each
2001 dependent of candidate C with an equivalent strength-reduced data
     reference.  */
2005 replace_refs (slsr_cand_t c
)
2007 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2009 fputs ("Replacing reference: ", dump_file
);
2010 print_gimple_stmt (dump_file
, c
->cand_stmt
, 0);
2013 if (gimple_vdef (c
->cand_stmt
))
2015 tree
*lhs
= gimple_assign_lhs_ptr (c
->cand_stmt
);
2016 replace_ref (lhs
, c
);
2020 tree
*rhs
= gimple_assign_rhs1_ptr (c
->cand_stmt
);
2021 replace_ref (rhs
, c
);
2024 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2026 fputs ("With: ", dump_file
);
2027 print_gimple_stmt (dump_file
, c
->cand_stmt
, 0);
2028 fputs ("\n", dump_file
);
2032 replace_refs (lookup_cand (c
->sibling
));
2035 replace_refs (lookup_cand (c
->dependent
));
2038 /* Return TRUE if candidate C is dependent upon a PHI. */
2041 phi_dependent_cand_p (slsr_cand_t c
)
2043 /* A candidate is not necessarily dependent upon a PHI just because
2044 it has a phi definition for its base name. It may have a basis
2045 that relies upon the same phi definition, in which case the PHI
2046 is irrelevant to this candidate. */
2049 && lookup_cand (c
->basis
)->def_phi
!= c
->def_phi
);
2052 /* Calculate the increment required for candidate C relative to
     its basis.  */
2056 cand_increment (slsr_cand_t c
)
2060 /* If the candidate doesn't have a basis, just return its own
2061 index. This is useful in record_increments to help us find
2062 an existing initializer. Also, if the candidate's basis is
2063 hidden by a phi, then its own index will be the increment
2064 from the newly introduced phi basis. */
2065 if (!c
->basis
|| phi_dependent_cand_p (c
))
2068 basis
= lookup_cand (c
->basis
);
2069 gcc_assert (operand_equal_p (c
->base_expr
, basis
->base_expr
, 0));
2070 return c
->index
- basis
->index
;
2073 /* Calculate the increment required for candidate C relative to
2074 its basis. If we aren't going to generate pointer arithmetic
2075 for this candidate, return the absolute value of that increment
     instead.  */
2078 static inline widest_int
2079 cand_abs_increment (slsr_cand_t c
)
2081 widest_int increment
= cand_increment (c
);
2083 if (!address_arithmetic_p
&& wi::neg_p (increment
))
2084 increment
= -increment
;
2089 /* Return TRUE iff candidate C has already been replaced under
2090 another interpretation. */
2093 cand_already_replaced (slsr_cand_t c
)
2095 return (gimple_bb (c
->cand_stmt
) == 0);
/* Common logic used by replace_unconditional_candidate and
   replace_conditional_candidate.  */

static void
replace_mult_candidate (slsr_cand_t c, tree basis_name, widest_int bump)
{
  tree target_type = TREE_TYPE (gimple_assign_lhs (c->cand_stmt));
  enum tree_code cand_code = gimple_assign_rhs_code (c->cand_stmt);

  /* It is not useful to replace casts, copies, negates, or adds of
     an SSA name and a constant.  */
  if (cand_code == SSA_NAME
      || CONVERT_EXPR_CODE_P (cand_code)
      || cand_code == PLUS_EXPR
      || cand_code == POINTER_PLUS_EXPR
      || cand_code == MINUS_EXPR
      || cand_code == NEGATE_EXPR)
    return;

  enum tree_code code = PLUS_EXPR;
  tree bump_tree;
  gimple *stmt_to_print = NULL;

  if (wi::neg_p (bump))
    {
      code = MINUS_EXPR;
      bump = -bump;
    }

  /* It is possible that the resulting bump doesn't fit in target_type.
     Abandon the replacement in this case.  This does not affect
     siblings or dependents of C.  */
  if (bump != wi::ext (bump, TYPE_PRECISION (target_type),
		       TYPE_SIGN (target_type)))
    return;

  bump_tree = wide_int_to_tree (target_type, bump);

  /* If the basis name and the candidate's LHS have incompatible types,
     introduce a cast.  */
  if (!useless_type_conversion_p (target_type, TREE_TYPE (basis_name)))
    basis_name = introduce_cast_before_cand (c, target_type, basis_name);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fputs ("Replacing: ", dump_file);
      print_gimple_stmt (dump_file, c->cand_stmt, 0);
    }

  if (bump == 0)
    {
      tree lhs = gimple_assign_lhs (c->cand_stmt);
      gassign *copy_stmt = gimple_build_assign (lhs, basis_name);
      gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
      slsr_cand_t cc = lookup_cand (c->first_interp);
      gimple_set_location (copy_stmt, gimple_location (c->cand_stmt));
      gsi_replace (&gsi, copy_stmt, false);
      while (cc)
	{
	  cc->cand_stmt = copy_stmt;
	  cc = lookup_cand (cc->next_interp);
	}
      if (dump_file && (dump_flags & TDF_DETAILS))
	stmt_to_print = copy_stmt;
    }
  else
    {
      tree rhs1, rhs2;
      if (cand_code != NEGATE_EXPR) {
	rhs1 = gimple_assign_rhs1 (c->cand_stmt);
	rhs2 = gimple_assign_rhs2 (c->cand_stmt);
      }
      if (cand_code != NEGATE_EXPR
	  && ((operand_equal_p (rhs1, basis_name, 0)
	       && operand_equal_p (rhs2, bump_tree, 0))
	      || (operand_equal_p (rhs1, bump_tree, 0)
		  && operand_equal_p (rhs2, basis_name, 0))))
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fputs ("(duplicate, not actually replacing)", dump_file);
	      stmt_to_print = c->cand_stmt;
	    }
	}
      else
	{
	  gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
	  slsr_cand_t cc = lookup_cand (c->first_interp);
	  gimple_assign_set_rhs_with_ops (&gsi, code, basis_name, bump_tree);
	  update_stmt (gsi_stmt (gsi));
	  while (cc)
	    {
	      cc->cand_stmt = gsi_stmt (gsi);
	      cc = lookup_cand (cc->next_interp);
	    }
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    stmt_to_print = gsi_stmt (gsi);
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fputs ("With: ", dump_file);
      print_gimple_stmt (dump_file, stmt_to_print, 0);
      fputs ("\n", dump_file);
    }
}
/* Replace candidate C with an add or subtract.   Note that we only
   operate on CAND_MULTs with known strides, so we will never generate
   a POINTER_PLUS_EXPR.  Each candidate X = (B + i) * S is replaced by
   X = Y + ((i - i') * S), as described in the module commentary.  The
   folded value ((i - i') * S) is referred to here as the "bump."  */
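/* A hedged example of the bump (names illustrative):  with basis
   Y = (B + 2) * S and candidate X = (B + 7) * S, where S is the known
   constant 4, the bump is (7 - 2) * 4 = 20 and the candidate becomes
   X = Y + 20.  */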
static void
replace_unconditional_candidate (slsr_cand_t c)
{
  slsr_cand_t basis;

  if (cand_already_replaced (c))
    return;

  basis = lookup_cand (c->basis);
  widest_int bump = cand_increment (c) * wi::to_widest (c->stride);

  replace_mult_candidate (c, gimple_assign_lhs (basis->cand_stmt), bump);
}
/* Return the index in the increment vector of the given INCREMENT,
   or -1 if not found.  The latter can occur if more than
   MAX_INCR_VEC_LEN increments have been found.  */

static int
incr_vec_index (const widest_int &increment)
{
  unsigned i;

  for (i = 0; i < incr_vec_len && increment != incr_vec[i].incr; i++)
    ;

  if (i < incr_vec_len)
    return i;
  else
    return -1;
}
/* Create a new statement along edge E to add BASIS_NAME to the product
   of INCREMENT and the stride of candidate C.  Create and return a new
   SSA name to be used as the LHS of the new statement.
   KNOWN_STRIDE is true iff C's stride is a constant.  */
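/* Illustrative GIMPLE sketch (hypothetical SSA names):  for BASIS_NAME t_5,
   an increment of 3, and a known constant stride of 4, the statement
   inserted on edge E is roughly
     slsr_9 = t_5 + 12;
   and slsr_9 is returned for use as the corresponding phi argument.  */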
static tree
create_add_on_incoming_edge (slsr_cand_t c, tree basis_name,
			     widest_int increment, edge e, location_t loc,
			     bool known_stride)
{
  tree lhs, basis_type;
  gassign *new_stmt, *cast_stmt = NULL;

  /* If the add candidate along this incoming edge has the same
     index as C's hidden basis, the hidden basis represents this
     edge correctly.  */
  if (increment == 0)
    return basis_name;

  basis_type = TREE_TYPE (basis_name);
  lhs = make_temp_ssa_name (basis_type, NULL, "slsr");

  /* Occasionally people convert integers to pointers without a
     cast, leading us into trouble if we aren't careful.  */
  enum tree_code plus_code
    = POINTER_TYPE_P (basis_type) ? POINTER_PLUS_EXPR : PLUS_EXPR;

  if (known_stride)
    {
      tree bump_tree;
      enum tree_code code = plus_code;
      widest_int bump = increment * wi::to_widest (c->stride);
      if (wi::neg_p (bump) && !POINTER_TYPE_P (basis_type))
	{
	  code = MINUS_EXPR;
	  bump = -bump;
	}

      tree stride_type = POINTER_TYPE_P (basis_type) ? sizetype : basis_type;
      bump_tree = wide_int_to_tree (stride_type, bump);
      new_stmt = gimple_build_assign (lhs, code, basis_name, bump_tree);
    }
  else
    {
      int i;
      bool negate_incr = !POINTER_TYPE_P (basis_type) && wi::neg_p (increment);
      i = incr_vec_index (negate_incr ? -increment : increment);
      gcc_assert (i >= 0);

      if (incr_vec[i].initializer)
	{
	  enum tree_code code = negate_incr ? MINUS_EXPR : plus_code;
	  new_stmt = gimple_build_assign (lhs, code, basis_name,
					  incr_vec[i].initializer);
	}
      else
	{
	  tree stride;

	  if (!types_compatible_p (TREE_TYPE (c->stride), c->stride_type))
	    {
	      tree cast_stride = make_temp_ssa_name (c->stride_type, NULL,
						     "slsr");
	      cast_stmt = gimple_build_assign (cast_stride, NOP_EXPR,
					       c->stride);
	      stride = cast_stride;
	    }
	  else
	    stride = c->stride;

	  if (increment == 1)
	    new_stmt = gimple_build_assign (lhs, plus_code, basis_name, stride);
	  else if (increment == -1)
	    new_stmt = gimple_build_assign (lhs, MINUS_EXPR, basis_name,
					    stride);
	  else
	    gcc_unreachable ();
	}
    }

  if (cast_stmt)
    {
      gimple_set_location (cast_stmt, loc);
      gsi_insert_on_edge (e, cast_stmt);
    }

  gimple_set_location (new_stmt, loc);
  gsi_insert_on_edge (e, new_stmt);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      if (cast_stmt)
	{
	  fprintf (dump_file, "Inserting cast on edge %d->%d: ",
		   e->src->index, e->dest->index);
	  print_gimple_stmt (dump_file, cast_stmt, 0);
	}
      fprintf (dump_file, "Inserting on edge %d->%d: ", e->src->index,
	       e->dest->index);
      print_gimple_stmt (dump_file, new_stmt, 0);
    }

  return lhs;
}
/* Clear the visited field for a tree of PHI candidates.  */

static void
clear_visited (gphi *phi)
{
  unsigned i;
  slsr_cand_t phi_cand = *stmt_cand_map->get (phi);

  if (phi_cand->visited)
    {
      phi_cand->visited = 0;

      for (i = 0; i < gimple_phi_num_args (phi); i++)
	{
	  tree arg = gimple_phi_arg_def (phi, i);
	  gimple *arg_def = SSA_NAME_DEF_STMT (arg);
	  if (gimple_code (arg_def) == GIMPLE_PHI)
	    clear_visited (as_a <gphi *> (arg_def));
	}
    }
}
/* Recursive helper function for create_phi_basis.  */

static tree
create_phi_basis_1 (slsr_cand_t c, gimple *from_phi, tree basis_name,
		    location_t loc, bool known_stride)
{
  int i;
  tree name, phi_arg;
  gphi *phi;
  slsr_cand_t basis = lookup_cand (c->basis);
  int nargs = gimple_phi_num_args (from_phi);
  basic_block phi_bb = gimple_bb (from_phi);
  slsr_cand_t phi_cand = *stmt_cand_map->get (from_phi);
  auto_vec <tree> phi_args (nargs);

  if (phi_cand->visited)
    return phi_cand->cached_basis;
  phi_cand->visited = 1;

  /* Process each argument of the existing phi that represents
     conditionally-executed add candidates.  */
  for (i = 0; i < nargs; i++)
    {
      edge e = (*phi_bb->preds)[i];
      tree arg = gimple_phi_arg_def (from_phi, i);
      tree feeding_def;

      /* If the phi argument is the base name of the CAND_PHI, then
	 this incoming arc should use the hidden basis.  */
      if (operand_equal_p (arg, phi_cand->base_expr, 0))
	if (basis->index == 0)
	  feeding_def = gimple_assign_lhs (basis->cand_stmt);
	else
	  {
	    widest_int incr = -basis->index;
	    feeding_def = create_add_on_incoming_edge (c, basis_name, incr,
						       e, loc, known_stride);
	  }
      else
	{
	  gimple *arg_def = SSA_NAME_DEF_STMT (arg);

	  /* If there is another phi along this incoming edge, we must
	     process it in the same fashion to ensure that all basis
	     adjustments are made along its incoming edges.  */
	  if (gimple_code (arg_def) == GIMPLE_PHI)
	    feeding_def = create_phi_basis_1 (c, arg_def, basis_name,
					      loc, known_stride);
	  else
	    {
	      slsr_cand_t arg_cand = base_cand_from_table (arg);
	      widest_int diff = arg_cand->index - basis->index;
	      feeding_def = create_add_on_incoming_edge (c, basis_name, diff,
							 e, loc, known_stride);
	    }
	}

      /* Because of recursion, we need to save the arguments in a vector
	 so we can create the PHI statement all at once.  Otherwise the
	 storage for the half-created PHI can be reclaimed.  */
      phi_args.safe_push (feeding_def);
    }

  /* Create the new phi basis.  */
  name = make_temp_ssa_name (TREE_TYPE (basis_name), NULL, "slsr");
  phi = create_phi_node (name, phi_bb);
  SSA_NAME_DEF_STMT (name) = phi;

  FOR_EACH_VEC_ELT (phi_args, i, phi_arg)
    {
      edge e = (*phi_bb->preds)[i];
      add_phi_arg (phi, phi_arg, e, loc);
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fputs ("Introducing new phi basis: ", dump_file);
      print_gimple_stmt (dump_file, phi, 0);
    }

  phi_cand->cached_basis = name;
  return name;
}
/* Given a candidate C with BASIS_NAME being the LHS of C's basis which
   is hidden by the phi node FROM_PHI, create a new phi node in the same
   block as FROM_PHI.  The new phi is suitable for use as a basis by C,
   with its phi arguments representing conditional adjustments to the
   hidden basis along conditional incoming paths.  Those adjustments are
   made by creating add statements (and sometimes recursively creating
   phis) along those incoming paths.  LOC is the location to attach to
   the introduced statements.  KNOWN_STRIDE is true iff C's stride is a
   constant.  */
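/* Sketch of the effect (hypothetical names):  if x_7 = PHI <a_4, a_5> hides
   the basis of C, a new phi such as slsr_10 = PHI <basis_lhs, slsr_9> is
   built in the same block, where slsr_9 is an add inserted on the incoming
   edge whose value needed a conditional adjustment.  */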
static tree
create_phi_basis (slsr_cand_t c, gimple *from_phi, tree basis_name,
		  location_t loc, bool known_stride)
{
  tree retval = create_phi_basis_1 (c, from_phi, basis_name, loc,
				    known_stride);
  gcc_assert (retval);
  clear_visited (as_a <gphi *> (from_phi));

  return retval;
}
/* Given a candidate C whose basis is hidden by at least one intervening
   phi, introduce a matching number of new phis to represent its basis
   adjusted by conditional increments along possible incoming paths.  Then
   replace C as though it were an unconditional candidate, using the new
   basis.  */
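/* The net effect, informally:  the multiply candidate keeps a single
   unconditional replacement (an add of the new phi basis and a constant
   bump), while the conditional adjustments migrate into adds on the
   incoming edges of the newly created phi.  */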
static void
replace_conditional_candidate (slsr_cand_t c)
{
  tree basis_name, name;
  slsr_cand_t basis;
  location_t loc;

  /* Look up the LHS SSA name from C's basis.  This will be the
     RHS1 of the adds we will introduce to create new phi arguments.  */
  basis = lookup_cand (c->basis);
  basis_name = gimple_assign_lhs (basis->cand_stmt);

  /* Create a new phi statement which will represent C's true basis
     after the transformation is complete.  */
  loc = gimple_location (c->cand_stmt);
  name = create_phi_basis (c, lookup_cand (c->def_phi)->cand_stmt,
			   basis_name, loc, KNOWN_STRIDE);

  /* Replace C with an add of the new basis phi and a constant.  */
  widest_int bump = c->index * wi::to_widest (c->stride);

  replace_mult_candidate (c, name, bump);
}
/* Recursive helper function for phi_add_costs.  SPREAD is a measure of
   how many PHI nodes we have visited at this point in the tree walk.  */

static int
phi_add_costs_1 (gimple *phi, slsr_cand_t c, int one_add_cost, int *spread)
{
  unsigned i;
  int cost = 0;
  slsr_cand_t phi_cand = *stmt_cand_map->get (phi);

  if (phi_cand->visited)
    return 0;

  phi_cand->visited = 1;
  (*spread)++;

  /* If we work our way back to a phi that isn't dominated by the hidden
     basis, this isn't a candidate for replacement.  Indicate this by
     returning an unreasonably high cost.  It's not easy to detect
     these situations when determining the basis, so we defer the
     decision until now.  */
  basic_block phi_bb = gimple_bb (phi);
  slsr_cand_t basis = lookup_cand (c->basis);
  basic_block basis_bb = gimple_bb (basis->cand_stmt);

  if (phi_bb == basis_bb || !dominated_by_p (CDI_DOMINATORS, phi_bb, basis_bb))
    return COST_INFINITE;

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree arg = gimple_phi_arg_def (phi, i);

      if (arg != phi_cand->base_expr)
	{
	  gimple *arg_def = SSA_NAME_DEF_STMT (arg);

	  if (gimple_code (arg_def) == GIMPLE_PHI)
	    {
	      cost += phi_add_costs_1 (arg_def, c, one_add_cost, spread);

	      if (cost >= COST_INFINITE || *spread > MAX_SPREAD)
		return COST_INFINITE;
	    }
	  else
	    {
	      slsr_cand_t arg_cand = base_cand_from_table (arg);

	      if (arg_cand->index != c->index)
		cost += one_add_cost;
	    }
	}
    }

  return cost;
}
/* Compute the expected costs of inserting basis adjustments for
   candidate C with phi-definition PHI.  The cost of inserting
   one adjustment is given by ONE_ADD_COST.  If PHI has arguments
   which are themselves phi results, recursively calculate costs
   for those phis as well.  */

static int
phi_add_costs (gimple *phi, slsr_cand_t c, int one_add_cost)
{
  int spread = 0;
  int retval = phi_add_costs_1 (phi, c, one_add_cost, &spread);
  clear_visited (as_a <gphi *> (phi));
  return retval;
}
/* For candidate C, each sibling of candidate C, and each dependent of
   candidate C, determine whether the candidate is dependent upon a
   phi that hides its basis.  If not, replace the candidate unconditionally.
   Otherwise, determine whether the cost of introducing compensation code
   for the candidate is offset by the gains from strength reduction.  If
   so, replace the candidate and introduce the compensation code.  */
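/* Informally, the profitability test below is
     cost = add_costs - mult_savings - dead_savings,
   and the conditional candidate is replaced when cost <= COST_NEUTRAL.  */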
static void
replace_uncond_cands_and_profitable_phis (slsr_cand_t c)
{
  if (phi_dependent_cand_p (c))
    {
      /* A multiply candidate with a stride of 1 is just an artifice
	 of a copy or cast; there is no value in replacing it.  */
      if (c->kind == CAND_MULT && wi::to_widest (c->stride) != 1)
	{
	  /* A candidate dependent upon a phi will replace a multiply by
	     a constant with an add, and will insert at most one add for
	     each phi argument.  Add these costs with the potential dead-code
	     savings to determine profitability.  */
	  bool speed = optimize_bb_for_speed_p (gimple_bb (c->cand_stmt));
	  int mult_savings = stmt_cost (c->cand_stmt, speed);
	  gimple *phi = lookup_cand (c->def_phi)->cand_stmt;
	  tree phi_result = gimple_phi_result (phi);
	  int one_add_cost = add_cost (speed,
				       TYPE_MODE (TREE_TYPE (phi_result)));
	  int add_costs = one_add_cost + phi_add_costs (phi, c, one_add_cost);
	  int cost = add_costs - mult_savings - c->dead_savings;

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "  Conditional candidate %d:\n", c->cand_num);
	      fprintf (dump_file, "    add_costs = %d\n", add_costs);
	      fprintf (dump_file, "    mult_savings = %d\n", mult_savings);
	      fprintf (dump_file, "    dead_savings = %d\n", c->dead_savings);
	      fprintf (dump_file, "    cost = %d\n", cost);
	      if (cost <= COST_NEUTRAL)
		fputs ("  Replacing...\n", dump_file);
	      else
		fputs ("  Not replaced.\n", dump_file);
	    }

	  if (cost <= COST_NEUTRAL)
	    replace_conditional_candidate (c);
	}
    }
  else
    replace_unconditional_candidate (c);

  if (c->sibling)
    replace_uncond_cands_and_profitable_phis (lookup_cand (c->sibling));

  if (c->dependent)
    replace_uncond_cands_and_profitable_phis (lookup_cand (c->dependent));
}
/* Count the number of candidates in the tree rooted at C that have
   not already been replaced under other interpretations.  */

static unsigned
count_candidates (slsr_cand_t c)
{
  unsigned count = cand_already_replaced (c) ? 0 : 1;

  if (c->sibling)
    count += count_candidates (lookup_cand (c->sibling));

  if (c->dependent)
    count += count_candidates (lookup_cand (c->dependent));

  return count;
}
/* Increase the count of INCREMENT by one in the increment vector.
   INCREMENT is associated with candidate C.  If INCREMENT is to be
   conditionally executed as part of a conditional candidate replacement,
   IS_PHI_ADJUST is true, otherwise false.  If an initializer
   T_0 = stride * I is provided by a candidate that dominates all
   candidates with the same increment, also record T_0 for subsequent use.  */
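/* For example (hypothetical GIMPLE), a dominating candidate of the form
     a_7 = x_3 + t_5;   where t_5 = s_2 * 3   (a CAND_ADD with index 3)
   lets t_5 be recorded as an existing initializer for increment 3, so later
   candidates needing that increment can reuse it instead of creating one.  */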
static void
record_increment (slsr_cand_t c, widest_int increment, bool is_phi_adjust)
{
  bool found = false;
  unsigned i;

  /* Treat increments that differ only in sign as identical so as to
     share initializers, unless we are generating pointer arithmetic.  */
  if (!address_arithmetic_p && wi::neg_p (increment))
    increment = -increment;

  for (i = 0; i < incr_vec_len; i++)
    {
      if (incr_vec[i].incr == increment)
	{
	  incr_vec[i].count++;
	  found = true;

	  /* If we previously recorded an initializer that doesn't
	     dominate this candidate, it's not going to be useful to
	     us after all.  */
	  if (incr_vec[i].initializer
	      && !dominated_by_p (CDI_DOMINATORS,
				  gimple_bb (c->cand_stmt),
				  incr_vec[i].init_bb))
	    {
	      incr_vec[i].initializer = NULL_TREE;
	      incr_vec[i].init_bb = NULL;
	    }

	  break;
	}
    }

  if (!found && incr_vec_len < MAX_INCR_VEC_LEN - 1)
    {
      /* The first time we see an increment, create the entry for it.
	 If this is the root candidate which doesn't have a basis, set
	 the count to zero.  We're only processing it so it can possibly
	 provide an initializer for other candidates.  */
      incr_vec[incr_vec_len].incr = increment;
      incr_vec[incr_vec_len].count = c->basis || is_phi_adjust ? 1 : 0;
      incr_vec[incr_vec_len].cost = COST_INFINITE;

      /* Optimistically record the first occurrence of this increment
	 as providing an initializer (if it does); we will revise this
	 opinion later if it doesn't dominate all other occurrences.
	 Exception:  increments of 0, 1 never need initializers;
	 and phi adjustments don't ever provide initializers.  */
      if (c->kind == CAND_ADD
	  && !is_phi_adjust
	  && c->index == increment
	  && (increment > 1 || increment < 0)
	  && (gimple_assign_rhs_code (c->cand_stmt) == PLUS_EXPR
	      || gimple_assign_rhs_code (c->cand_stmt) == POINTER_PLUS_EXPR))
	{
	  tree t0 = NULL_TREE;
	  tree rhs1 = gimple_assign_rhs1 (c->cand_stmt);
	  tree rhs2 = gimple_assign_rhs2 (c->cand_stmt);
	  if (operand_equal_p (rhs1, c->base_expr, 0))
	    t0 = rhs2;
	  else if (operand_equal_p (rhs2, c->base_expr, 0))
	    t0 = rhs1;
	  if (t0
	      && SSA_NAME_DEF_STMT (t0)
	      && gimple_bb (SSA_NAME_DEF_STMT (t0)))
	    {
	      incr_vec[incr_vec_len].initializer = t0;
	      incr_vec[incr_vec_len++].init_bb
		= gimple_bb (SSA_NAME_DEF_STMT (t0));
	    }
	  else
	    {
	      incr_vec[incr_vec_len].initializer = NULL_TREE;
	      incr_vec[incr_vec_len++].init_bb = NULL;
	    }
	}
      else
	{
	  incr_vec[incr_vec_len].initializer = NULL_TREE;
	  incr_vec[incr_vec_len++].init_bb = NULL;
	}
    }
}
/* Recursive helper function for record_phi_increments.  */

static void
record_phi_increments_1 (slsr_cand_t basis, gimple *phi)
{
  unsigned i;
  slsr_cand_t phi_cand = *stmt_cand_map->get (phi);

  if (phi_cand->visited)
    return;
  phi_cand->visited = 1;

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree arg = gimple_phi_arg_def (phi, i);
      gimple *arg_def = SSA_NAME_DEF_STMT (arg);

      if (gimple_code (arg_def) == GIMPLE_PHI)
	record_phi_increments_1 (basis, arg_def);
      else
	{
	  widest_int diff;

	  if (operand_equal_p (arg, phi_cand->base_expr, 0))
	    {
	      diff = -basis->index;
	      record_increment (phi_cand, diff, PHI_ADJUST);
	    }
	  else
	    {
	      slsr_cand_t arg_cand = base_cand_from_table (arg);
	      diff = arg_cand->index - basis->index;
	      record_increment (arg_cand, diff, PHI_ADJUST);
	    }
	}
    }
}
/* Given phi statement PHI that hides a candidate from its BASIS, find
   the increments along each incoming arc (recursively handling additional
   phis that may be present) and record them.  These increments are the
   difference in index between the index-adjusting statements and the
   index of the basis.  */

static void
record_phi_increments (slsr_cand_t basis, gimple *phi)
{
  record_phi_increments_1 (basis, phi);
  clear_visited (as_a <gphi *> (phi));
}
/* Determine how many times each unique increment occurs in the set
   of candidates rooted at C's parent, recording the data in the
   increment vector.  For each unique increment I, if an initializer
   T_0 = stride * I is provided by a candidate that dominates all
   candidates with the same increment, also record T_0 for subsequent
   use.  */

static void
record_increments (slsr_cand_t c)
{
  if (!cand_already_replaced (c))
    {
      if (!phi_dependent_cand_p (c))
	record_increment (c, cand_increment (c), NOT_PHI_ADJUST);
      else
	{
	  /* A candidate with a basis hidden by a phi will have one
	     increment for its relationship to the index represented by
	     the phi, and potentially additional increments along each
	     incoming edge.  For the root of the dependency tree (which
	     has no basis), process just the initial index in case it has
	     an initializer that can be used by subsequent candidates.  */
	  record_increment (c, c->index, NOT_PHI_ADJUST);

	  if (c->basis)
	    record_phi_increments (lookup_cand (c->basis),
				   lookup_cand (c->def_phi)->cand_stmt);
	}
    }

  if (c->sibling)
    record_increments (lookup_cand (c->sibling));

  if (c->dependent)
    record_increments (lookup_cand (c->dependent));
}
/* Recursive helper function for phi_incr_cost.  */

static int
phi_incr_cost_1 (slsr_cand_t c, const widest_int &incr, gimple *phi,
		 int *savings)
{
  unsigned i;
  int cost = 0;
  slsr_cand_t basis = lookup_cand (c->basis);
  slsr_cand_t phi_cand = *stmt_cand_map->get (phi);

  if (phi_cand->visited)
    return 0;
  phi_cand->visited = 1;

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree arg = gimple_phi_arg_def (phi, i);
      gimple *arg_def = SSA_NAME_DEF_STMT (arg);

      if (gimple_code (arg_def) == GIMPLE_PHI)
	{
	  int feeding_savings = 0;
	  tree feeding_var = gimple_phi_result (arg_def);
	  cost += phi_incr_cost_1 (c, incr, arg_def, &feeding_savings);
	  if (uses_consumed_by_stmt (feeding_var, phi))
	    *savings += feeding_savings;
	}
      else
	{
	  widest_int diff;
	  slsr_cand_t arg_cand;

	  /* When the PHI argument is just a pass-through to the base
	     expression of the hidden basis, the difference is zero minus
	     the index of the basis.  There is no potential savings by
	     eliminating a statement in this case.  */
	  if (operand_equal_p (arg, phi_cand->base_expr, 0))
	    {
	      arg_cand = (slsr_cand_t)NULL;
	      diff = -basis->index;
	    }
	  else
	    {
	      arg_cand = base_cand_from_table (arg);
	      diff = arg_cand->index - basis->index;
	    }

	  if (incr == diff || (!address_arithmetic_p && incr == -diff))
	    {
	      tree basis_lhs = gimple_assign_lhs (basis->cand_stmt);
	      cost += add_cost (true, TYPE_MODE (TREE_TYPE (basis_lhs)));
	      if (arg_cand)
		{
		  tree lhs = gimple_assign_lhs (arg_cand->cand_stmt);
		  if (uses_consumed_by_stmt (lhs, phi))
		    *savings += stmt_cost (arg_cand->cand_stmt, true);
		}
	    }
	}
    }

  return cost;
}
/* Add up and return the costs of introducing add statements that
   require the increment INCR on behalf of candidate C and phi
   statement PHI.  Accumulate into *SAVINGS the potential savings
   from removing existing statements that feed PHI and have no other
   uses.  */

static int
phi_incr_cost (slsr_cand_t c, const widest_int &incr, gimple *phi,
	       int *savings)
{
  int retval = phi_incr_cost_1 (c, incr, phi, savings);
  clear_visited (as_a <gphi *> (phi));
  return retval;
}
/* Return the first candidate in the tree rooted at C that has not
   already been replaced, favoring siblings over dependents.  */

static slsr_cand_t
unreplaced_cand_in_tree (slsr_cand_t c)
{
  if (!cand_already_replaced (c))
    return c;

  if (c->sibling)
    {
      slsr_cand_t sib = unreplaced_cand_in_tree (lookup_cand (c->sibling));
      if (sib)
	return sib;
    }

  if (c->dependent)
    {
      slsr_cand_t dep = unreplaced_cand_in_tree (lookup_cand (c->dependent));
      if (dep)
	return dep;
    }

  return NULL;
}
/* Return TRUE if the candidates in the tree rooted at C should be
   optimized for speed, else FALSE.  We estimate this based on the block
   containing the most dominant candidate in the tree that has not yet
   been replaced.  */

static bool
optimize_cands_for_speed_p (slsr_cand_t c)
{
  slsr_cand_t c2 = unreplaced_cand_in_tree (c);
  gcc_assert (c2);
  return optimize_bb_for_speed_p (gimple_bb (c2->cand_stmt));
}
/* Add COST_IN to the lowest cost of any dependent path starting at
   candidate C or any of its siblings, counting only candidates along
   such paths with increment INCR.  Assume that replacing a candidate
   reduces cost by REPL_SAVINGS.  Also account for savings from any
   statements that would go dead.  If COUNT_PHIS is true, include
   costs of introducing feeding statements for conditional candidates.  */
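/* Informally, the recursion below returns the minimum over all dependent
   paths of (COST_IN minus the replacement and dead-code savings accrued
   along the path); a result at or below COST_NEUTRAL means the increment
   pays for itself on at least one path.  */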
static int
lowest_cost_path (int cost_in, int repl_savings, slsr_cand_t c,
		  const widest_int &incr, bool count_phis)
{
  int local_cost, sib_cost, savings = 0;
  widest_int cand_incr = cand_abs_increment (c);

  if (cand_already_replaced (c))
    local_cost = cost_in;
  else if (incr == cand_incr)
    local_cost = cost_in - repl_savings - c->dead_savings;
  else
    local_cost = cost_in - c->dead_savings;

  if (count_phis
      && phi_dependent_cand_p (c)
      && !cand_already_replaced (c))
    {
      gimple *phi = lookup_cand (c->def_phi)->cand_stmt;
      local_cost += phi_incr_cost (c, incr, phi, &savings);

      if (uses_consumed_by_stmt (gimple_phi_result (phi), c->cand_stmt))
	local_cost -= savings;
    }

  if (c->dependent)
    local_cost = lowest_cost_path (local_cost, repl_savings,
				   lookup_cand (c->dependent), incr,
				   count_phis);

  if (c->sibling)
    {
      sib_cost = lowest_cost_path (cost_in, repl_savings,
				   lookup_cand (c->sibling), incr,
				   count_phis);
      local_cost = MIN (local_cost, sib_cost);
    }

  return local_cost;
}
/* Compute the total savings that would accrue from all replacements
   in the candidate tree rooted at C, counting only candidates with
   increment INCR.  Assume that replacing a candidate reduces cost
   by REPL_SAVINGS.  Also account for savings from statements that
   would go dead.  */

static int
total_savings (int repl_savings, slsr_cand_t c, const widest_int &incr,
	       bool count_phis)
{
  int savings = 0;
  widest_int cand_incr = cand_abs_increment (c);

  if (incr == cand_incr && !cand_already_replaced (c))
    savings += repl_savings + c->dead_savings;

  if (count_phis
      && phi_dependent_cand_p (c)
      && !cand_already_replaced (c))
    {
      int phi_savings = 0;
      gimple *phi = lookup_cand (c->def_phi)->cand_stmt;
      savings -= phi_incr_cost (c, incr, phi, &phi_savings);

      if (uses_consumed_by_stmt (gimple_phi_result (phi), c->cand_stmt))
	savings += phi_savings;
    }

  if (c->dependent)
    savings += total_savings (repl_savings, lookup_cand (c->dependent), incr,
			      count_phis);

  if (c->sibling)
    savings += total_savings (repl_savings, lookup_cand (c->sibling), incr,
			      count_phis);

  return savings;
}
/* Use target-specific costs to determine and record which increments
   in the current candidate tree are profitable to replace, assuming
   MODE and SPEED.  FIRST_DEP is the first dependent of the root of
   the candidate tree.

   One slight limitation here is that we don't account for the possible
   introduction of casts in some cases.  See replace_one_candidate for
   the cases where these are introduced.  This should probably be cleaned
   up sometime.  */
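/* Rough shape of the computation below for a multiply chain (illustrative
   only):  the initializer costs roughly mult_by_coeff_cost of the increment,
   each replaced candidate saves roughly a multiply cost minus an add cost,
   and the increment is marked profitable only if the accumulated savings
   (along the best path when optimizing for speed, or in total when
   optimizing for size) offset that fixed cost.  */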
static void
analyze_increments (slsr_cand_t first_dep, machine_mode mode, bool speed)
{
  unsigned i;

  for (i = 0; i < incr_vec_len; i++)
    {
      HOST_WIDE_INT incr = incr_vec[i].incr.to_shwi ();

      /* If somehow this increment is bigger than a HWI, we won't
	 be optimizing candidates that use it.  And if the increment
	 has a count of zero, nothing will be done with it.  */
      if (!wi::fits_shwi_p (incr_vec[i].incr) || !incr_vec[i].count)
	incr_vec[i].cost = COST_INFINITE;

      /* Increments of 0, 1, and -1 are always profitable to replace,
	 because they always replace a multiply or add with an add or
	 copy, and may cause one or more existing instructions to go
	 dead.  Exception:  -1 can't be assumed to be profitable for
	 pointer addition.  */
      else if (incr == 0
	       || incr == 1
	       || (incr == -1
		   && !POINTER_TYPE_P (first_dep->cand_type)))
	incr_vec[i].cost = COST_NEUTRAL;

      /* If we need to add an initializer, give up if a cast from the
	 candidate's type to its stride's type can lose precision.
	 Note that this already takes into account that the stride may
	 have been cast to a wider type, in which case this test won't
	 fire.  Example:

	   short int _1;
	   _2 = (int) _1;
	   _3 = _2 * 10;
	   _4 = x + _3;    ADD: x + (10 * (int)_1) : int
	   _5 = _2 * 15;
	   _6 = x + _5;    ADD: x + (15 * (int)_1) : int

	 Although the stride was a short int initially, the stride
	 used in the analysis has been widened to an int, and such
	 widening will be done in the initializer as well.  */
      else if (!incr_vec[i].initializer
	       && TREE_CODE (first_dep->stride) != INTEGER_CST
	       && !legal_cast_p_1 (first_dep->stride_type,
				   TREE_TYPE (gimple_assign_lhs
					      (first_dep->cand_stmt))))
	incr_vec[i].cost = COST_INFINITE;

      /* If we need to add an initializer, make sure we don't introduce
	 a multiply by a pointer type, which can happen in certain cast
	 scenarios.  */
      else if (!incr_vec[i].initializer
	       && TREE_CODE (first_dep->stride) != INTEGER_CST
	       && POINTER_TYPE_P (first_dep->stride_type))
	incr_vec[i].cost = COST_INFINITE;

      /* For any other increment, if this is a multiply candidate, we
	 must introduce a temporary T and initialize it with
	 T_0 = stride * increment.  When optimizing for speed, walk the
	 candidate tree to calculate the best cost reduction along any
	 path; if it offsets the fixed cost of inserting the initializer,
	 replacing the increment is profitable.  When optimizing for
	 size, instead calculate the total cost reduction from replacing
	 all candidates with this increment.  */
      else if (first_dep->kind == CAND_MULT)
	{
	  int cost = mult_by_coeff_cost (incr, mode, speed);
	  int repl_savings;

	  if (tree_fits_shwi_p (first_dep->stride))
	    {
	      HOST_WIDE_INT hwi_stride = tree_to_shwi (first_dep->stride);
	      repl_savings = mult_by_coeff_cost (hwi_stride, mode, speed);
	    }
	  else
	    repl_savings = mul_cost (speed, mode);
	  repl_savings -= add_cost (speed, mode);

	  if (speed)
	    cost = lowest_cost_path (cost, repl_savings, first_dep,
				     incr_vec[i].incr, COUNT_PHIS);
	  else
	    cost -= total_savings (repl_savings, first_dep, incr_vec[i].incr,
				   COUNT_PHIS);

	  incr_vec[i].cost = cost;
	}

      /* If this is an add candidate, the initializer may already
	 exist, so only calculate the cost of the initializer if it
	 doesn't.  We are replacing one add with another here, so the
	 known replacement savings is zero.  We will account for removal
	 of dead instructions in lowest_cost_path or total_savings.  */
      else
	{
	  int cost = 0;
	  if (!incr_vec[i].initializer)
	    cost = mult_by_coeff_cost (incr, mode, speed);

	  if (speed)
	    cost = lowest_cost_path (cost, 0, first_dep, incr_vec[i].incr,
				     DONT_COUNT_PHIS);
	  else
	    cost -= total_savings (0, first_dep, incr_vec[i].incr,
				   DONT_COUNT_PHIS);

	  incr_vec[i].cost = cost;
	}
    }
}
/* Return the nearest common dominator of BB1 and BB2.  If the blocks
   are identical, return the earlier of C1 and C2 in *WHERE.  Otherwise,
   if the NCD matches BB1, return C1 in *WHERE; if the NCD matches BB2,
   return C2 in *WHERE; and if the NCD matches neither, return NULL in
   *WHERE.  Note: It is possible for one of C1 and C2 to be NULL.  */

static basic_block
ncd_for_two_cands (basic_block bb1, basic_block bb2,
		   slsr_cand_t c1, slsr_cand_t c2, slsr_cand_t *where)
{
  basic_block ncd;

  if (!bb1)
    {
      *where = c2;
      return bb2;
    }

  if (!bb2)
    {
      *where = c1;
      return bb1;
    }

  ncd = nearest_common_dominator (CDI_DOMINATORS, bb1, bb2);

  /* If both candidates are in the same block, the earlier
     candidate wins.  */
  if (bb1 == ncd && bb2 == ncd)
    {
      if (!c1 || (c2 && c2->cand_num < c1->cand_num))
	*where = c2;
      else
	*where = c1;
    }

  /* Otherwise, if one of them produced a candidate in the
     dominator, that one wins.  */
  else if (bb1 == ncd)
    *where = c1;

  else if (bb2 == ncd)
    *where = c2;

  /* If neither matches the dominator, neither wins.  */
  else
    *where = NULL;

  return ncd;
}
/* Consider all candidates that feed PHI.  Find the nearest common
   dominator of those candidates requiring the given increment INCR.
   Further find and return the nearest common dominator of this result
   with block NCD.  If the returned block contains one or more of the
   candidates, return the earliest candidate in the block in *WHERE.  */

static basic_block
ncd_with_phi (slsr_cand_t c, const widest_int &incr, gphi *phi,
	      basic_block ncd, slsr_cand_t *where)
{
  unsigned i;
  slsr_cand_t basis = lookup_cand (c->basis);
  slsr_cand_t phi_cand = *stmt_cand_map->get (phi);

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree arg = gimple_phi_arg_def (phi, i);
      gimple *arg_def = SSA_NAME_DEF_STMT (arg);

      if (gimple_code (arg_def) == GIMPLE_PHI)
	ncd = ncd_with_phi (c, incr, as_a <gphi *> (arg_def), ncd, where);
      else
	{
	  widest_int diff;

	  if (operand_equal_p (arg, phi_cand->base_expr, 0))
	    diff = -basis->index;
	  else
	    {
	      slsr_cand_t arg_cand = base_cand_from_table (arg);
	      diff = arg_cand->index - basis->index;
	    }

	  basic_block pred = gimple_phi_arg_edge (phi, i)->src;

	  if ((incr == diff) || (!address_arithmetic_p && incr == -diff))
	    ncd = ncd_for_two_cands (ncd, pred, *where, NULL, where);
	}
    }

  return ncd;
}
/* Consider the candidate C together with any candidates that feed
   C's phi dependence (if any).  Find and return the nearest common
   dominator of those candidates requiring the given increment INCR.
   If the returned block contains one or more of the candidates,
   return the earliest candidate in the block in *WHERE.  */

static basic_block
ncd_of_cand_and_phis (slsr_cand_t c, const widest_int &incr, slsr_cand_t *where)
{
  basic_block ncd = NULL;

  if (cand_abs_increment (c) == incr)
    {
      ncd = gimple_bb (c->cand_stmt);
      *where = c;
    }

  if (phi_dependent_cand_p (c))
    ncd = ncd_with_phi (c, incr,
			as_a <gphi *> (lookup_cand (c->def_phi)->cand_stmt),
			ncd, where);

  return ncd;
}
/* Consider all candidates in the tree rooted at C for which INCR
   represents the required increment of C relative to its basis.
   Find and return the basic block that most nearly dominates all
   such candidates.  If the returned block contains one or more of
   the candidates, return the earliest candidate in the block in
   *WHERE.  */

static basic_block
nearest_common_dominator_for_cands (slsr_cand_t c, const widest_int &incr,
				    slsr_cand_t *where)
{
  basic_block sib_ncd = NULL, dep_ncd = NULL, this_ncd = NULL, ncd;
  slsr_cand_t sib_where = NULL, dep_where = NULL, this_where = NULL, new_where;

  /* First find the NCD of all siblings and dependents.  */
  if (c->sibling)
    sib_ncd = nearest_common_dominator_for_cands (lookup_cand (c->sibling),
						   incr, &sib_where);
  if (c->dependent)
    dep_ncd = nearest_common_dominator_for_cands (lookup_cand (c->dependent),
						   incr, &dep_where);
  if (!sib_ncd && !dep_ncd)
    {
      new_where = NULL;
      ncd = NULL;
    }
  else if (sib_ncd && !dep_ncd)
    {
      new_where = sib_where;
      ncd = sib_ncd;
    }
  else if (dep_ncd && !sib_ncd)
    {
      new_where = dep_where;
      ncd = dep_ncd;
    }
  else
    ncd = ncd_for_two_cands (sib_ncd, dep_ncd, sib_where,
			     dep_where, &new_where);

  /* If the candidate's increment doesn't match the one we're interested
     in (and nor do any increments for feeding defs of a phi-dependence),
     then the result depends only on siblings and dependents.  */
  this_ncd = ncd_of_cand_and_phis (c, incr, &this_where);

  if (!this_ncd || cand_already_replaced (c))
    {
      *where = new_where;
      return ncd;
    }

  /* Otherwise, compare this candidate with the result from all siblings
     and dependents.  */
  ncd = ncd_for_two_cands (ncd, this_ncd, new_where, this_where, where);

  return ncd;
}
/* Return TRUE if the increment indexed by INDEX is profitable to replace.  */

static bool
profitable_increment_p (unsigned index)
{
  return (incr_vec[index].cost <= COST_NEUTRAL);
}
/* For each profitable increment in the increment vector not equal to
   0 or 1 (or -1, for non-pointer arithmetic), find the nearest common
   dominator of all statements in the candidate chain rooted at C
   that require that increment, and insert an initializer
   T_0 = stride * increment at that location.  Record T_0 with the
   increment record.  */
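/* For example (hypothetical names), for a profitable increment of 5 with
   stride s_2, a statement like
     slsr_9 = s_2 * 5;
   is inserted at the nearest common dominator of the candidates that need
   it, and slsr_9 becomes that increment's recorded initializer.  */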
static void
insert_initializers (slsr_cand_t c)
{
  unsigned i;

  for (i = 0; i < incr_vec_len; i++)
    {
      basic_block bb;
      slsr_cand_t where = NULL;
      gassign *init_stmt;
      gassign *cast_stmt = NULL;
      tree new_name, incr_tree, init_stride;
      widest_int incr = incr_vec[i].incr;

      if (!profitable_increment_p (i)
	  || incr == 1
	  || (incr == -1
	      && (!POINTER_TYPE_P (lookup_cand (c->basis)->cand_type)))
	  || incr == 0)
	continue;

      /* We may have already identified an existing initializer that
	 will suffice.  */
      if (incr_vec[i].initializer)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fputs ("Using existing initializer: ", dump_file);
	      print_gimple_stmt (dump_file,
				 SSA_NAME_DEF_STMT (incr_vec[i].initializer),
				 0);
	    }
	  continue;
	}

      /* Find the block that most closely dominates all candidates
	 with this increment.  If there is at least one candidate in
	 that block, the earliest one will be returned in WHERE.  */
      bb = nearest_common_dominator_for_cands (c, incr, &where);

      /* If the NCD is not dominated by the block containing the
	 definition of the stride, we can't legally insert a
	 single initializer.  Mark the increment as unprofitable
	 so we don't make any replacements.  FIXME: Multiple
	 initializers could be placed with more analysis.  */
      gimple *stride_def = SSA_NAME_DEF_STMT (c->stride);
      basic_block stride_bb = gimple_bb (stride_def);

      if (stride_bb && !dominated_by_p (CDI_DOMINATORS, bb, stride_bb))
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file,
		     "Initializer #%d cannot be legally placed\n", i);
	  incr_vec[i].cost = COST_INFINITE;
	  continue;
	}

      /* If the nominal stride has a different type than the recorded
	 stride type, build a cast from the nominal stride to that type.  */
      if (!types_compatible_p (TREE_TYPE (c->stride), c->stride_type))
	{
	  init_stride = make_temp_ssa_name (c->stride_type, NULL, "slsr");
	  cast_stmt = gimple_build_assign (init_stride, NOP_EXPR, c->stride);
	}
      else
	init_stride = c->stride;

      /* Create a new SSA name to hold the initializer's value.  */
      new_name = make_temp_ssa_name (c->stride_type, NULL, "slsr");
      incr_vec[i].initializer = new_name;

      /* Create the initializer and insert it in the latest possible
	 dominating position.  */
      incr_tree = wide_int_to_tree (c->stride_type, incr);
      init_stmt = gimple_build_assign (new_name, MULT_EXPR,
				       init_stride, incr_tree);
      if (where)
	{
	  gimple_stmt_iterator gsi = gsi_for_stmt (where->cand_stmt);
	  location_t loc = gimple_location (where->cand_stmt);

	  if (cast_stmt)
	    {
	      gsi_insert_before (&gsi, cast_stmt, GSI_SAME_STMT);
	      gimple_set_location (cast_stmt, loc);
	    }

	  gsi_insert_before (&gsi, init_stmt, GSI_SAME_STMT);
	  gimple_set_location (init_stmt, loc);
	}
      else
	{
	  gimple_stmt_iterator gsi = gsi_last_bb (bb);
	  gimple *basis_stmt = lookup_cand (c->basis)->cand_stmt;
	  location_t loc = gimple_location (basis_stmt);

	  if (!gsi_end_p (gsi) && stmt_ends_bb_p (gsi_stmt (gsi)))
	    {
	      if (cast_stmt)
		{
		  gsi_insert_before (&gsi, cast_stmt, GSI_SAME_STMT);
		  gimple_set_location (cast_stmt, loc);
		}
	      gsi_insert_before (&gsi, init_stmt, GSI_SAME_STMT);
	    }
	  else
	    {
	      if (cast_stmt)
		{
		  gsi_insert_after (&gsi, cast_stmt, GSI_NEW_STMT);
		  gimple_set_location (cast_stmt, loc);
		}
	      gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
	    }

	  gimple_set_location (init_stmt, gimple_location (basis_stmt));
	}

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  if (cast_stmt)
	    {
	      fputs ("Inserting stride cast: ", dump_file);
	      print_gimple_stmt (dump_file, cast_stmt, 0);
	    }
	  fputs ("Inserting initializer: ", dump_file);
	  print_gimple_stmt (dump_file, init_stmt, 0);
	}
    }
}
/* Recursive helper function for all_phi_incrs_profitable.  */

static bool
all_phi_incrs_profitable_1 (slsr_cand_t c, gphi *phi, int *spread)
{
  unsigned i;
  slsr_cand_t basis = lookup_cand (c->basis);
  slsr_cand_t phi_cand = *stmt_cand_map->get (phi);

  if (phi_cand->visited)
    return true;

  phi_cand->visited = 1;
  (*spread)++;

  /* If the basis doesn't dominate the PHI (including when the PHI is
     in the same block as the basis), we won't be able to create a PHI
     using the basis here.  */
  basic_block basis_bb = gimple_bb (basis->cand_stmt);
  basic_block phi_bb = gimple_bb (phi);

  if (phi_bb == basis_bb
      || !dominated_by_p (CDI_DOMINATORS, phi_bb, basis_bb))
    return false;

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      /* If the PHI arg resides in a block not dominated by the basis,
	 we won't be able to create a PHI using the basis here.  */
      basic_block pred_bb = gimple_phi_arg_edge (phi, i)->src;

      if (!dominated_by_p (CDI_DOMINATORS, pred_bb, basis_bb))
	return false;

      tree arg = gimple_phi_arg_def (phi, i);
      gimple *arg_def = SSA_NAME_DEF_STMT (arg);

      if (gimple_code (arg_def) == GIMPLE_PHI)
	{
	  if (!all_phi_incrs_profitable_1 (c, as_a <gphi *> (arg_def), spread)
	      || *spread > MAX_SPREAD)
	    return false;
	}
      else
	{
	  int j;
	  widest_int increment;

	  if (operand_equal_p (arg, phi_cand->base_expr, 0))
	    increment = -basis->index;
	  else
	    {
	      slsr_cand_t arg_cand = base_cand_from_table (arg);
	      increment = arg_cand->index - basis->index;
	    }

	  if (!address_arithmetic_p && wi::neg_p (increment))
	    increment = -increment;

	  j = incr_vec_index (increment);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "  Conditional candidate %d, phi: ",
		       c->cand_num);
	      print_gimple_stmt (dump_file, phi, 0);
	      fputs ("    increment: ", dump_file);
	      print_decs (increment, dump_file);
	      if (j < 0)
		fprintf (dump_file,
			 "\n  Not replaced; incr_vec overflow.\n");
	      else
		{
		  fprintf (dump_file, "\n    cost: %d\n", incr_vec[j].cost);
		  if (profitable_increment_p (j))
		    fputs ("  Replacing...\n", dump_file);
		  else
		    fputs ("  Not replaced.\n", dump_file);
		}
	    }

	  if (j < 0 || !profitable_increment_p (j))
	    return false;
	}
    }

  return true;
}
/* Return TRUE iff all required increments for candidates feeding PHI
   are profitable (and legal!) to replace on behalf of candidate C.  */

static bool
all_phi_incrs_profitable (slsr_cand_t c, gphi *phi)
{
  int spread = 0;
  bool retval = all_phi_incrs_profitable_1 (c, phi, &spread);
  clear_visited (phi);
  return retval;
}
/* Create a NOP_EXPR that copies FROM_EXPR into a new SSA name of
   type TO_TYPE, and insert it in front of the statement represented
   by candidate C.  Return the new SSA name.  */

static tree
introduce_cast_before_cand (slsr_cand_t c, tree to_type, tree from_expr)
{
  tree cast_lhs;
  gassign *cast_stmt;
  gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);

  cast_lhs = make_temp_ssa_name (to_type, NULL, "slsr");
  cast_stmt = gimple_build_assign (cast_lhs, NOP_EXPR, from_expr);
  gimple_set_location (cast_stmt, gimple_location (c->cand_stmt));
  gsi_insert_before (&gsi, cast_stmt, GSI_SAME_STMT);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fputs ("  Inserting: ", dump_file);
      print_gimple_stmt (dump_file, cast_stmt, 0);
    }

  return cast_lhs;
}
/* Replace the RHS of the statement represented by candidate C with
   NEW_CODE, NEW_RHS1, and NEW_RHS2, provided that to do so doesn't
   leave C unchanged or just interchange its operands.  The original
   operation and operands are in OLD_CODE, OLD_RHS1, and OLD_RHS2.
   If the replacement was made and we are doing a details dump,
   return the revised statement, else NULL.  */

static gimple *
replace_rhs_if_not_dup (enum tree_code new_code, tree new_rhs1, tree new_rhs2,
			enum tree_code old_code, tree old_rhs1, tree old_rhs2,
			slsr_cand_t c)
{
  if (new_code != old_code
      || ((!operand_equal_p (new_rhs1, old_rhs1, 0)
	   || !operand_equal_p (new_rhs2, old_rhs2, 0))
	  && (!operand_equal_p (new_rhs1, old_rhs2, 0)
	      || !operand_equal_p (new_rhs2, old_rhs1, 0))))
    {
      gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
      slsr_cand_t cc = lookup_cand (c->first_interp);
      gimple_assign_set_rhs_with_ops (&gsi, new_code, new_rhs1, new_rhs2);
      update_stmt (gsi_stmt (gsi));
      while (cc)
	{
	  cc->cand_stmt = gsi_stmt (gsi);
	  cc = lookup_cand (cc->next_interp);
	}

      if (dump_file && (dump_flags & TDF_DETAILS))
	return gsi_stmt (gsi);
    }

  else if (dump_file && (dump_flags & TDF_DETAILS))
    fputs ("  (duplicate, not actually replacing)\n", dump_file);

  return NULL;
}
/* Strength-reduce the statement represented by candidate C by replacing
   it with an equivalent addition or subtraction.  I is the index into
   the increment vector identifying C's increment.  A new SSA name is
   created if a cast needs to be introduced.  BASIS_NAME is the rhs1
   to use in creating the add/subtract.  */
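/* Shapes of the replacements made below (illustrative, with basis name y_4):
   an initializer t_7 yields x_5 = y_4 + t_7 (or y_4 - t_7 when the recorded
   increment is the negation of C's); increments of 1, -1 and 0 yield
   x_5 = y_4 + stride, x_5 = y_4 - stride, and a plain copy x_5 = y_4,
   possibly with casts when the types disagree.  */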
static void
replace_one_candidate (slsr_cand_t c, unsigned i, tree basis_name)
{
  gimple *stmt_to_print = NULL;
  tree orig_rhs1, orig_rhs2;
  tree rhs2;
  enum tree_code orig_code, repl_code;
  widest_int cand_incr;

  orig_code = gimple_assign_rhs_code (c->cand_stmt);
  orig_rhs1 = gimple_assign_rhs1 (c->cand_stmt);
  orig_rhs2 = gimple_assign_rhs2 (c->cand_stmt);
  cand_incr = cand_increment (c);

  /* If orig_rhs2 is NULL, we have already replaced this in situ with
     a copy statement under another interpretation.  */
  if (!orig_rhs2)
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fputs ("Replacing: ", dump_file);
      print_gimple_stmt (dump_file, c->cand_stmt, 0);
      stmt_to_print = c->cand_stmt;
    }

  if (address_arithmetic_p)
    repl_code = POINTER_PLUS_EXPR;
  else
    repl_code = PLUS_EXPR;

  /* If the increment has an initializer T_0, replace the candidate
     statement with an add of the basis name and the initializer.  */
  if (incr_vec[i].initializer)
    {
      tree init_type = TREE_TYPE (incr_vec[i].initializer);
      tree orig_type = TREE_TYPE (orig_rhs2);

      if (types_compatible_p (orig_type, init_type))
	rhs2 = incr_vec[i].initializer;
      else
	rhs2 = introduce_cast_before_cand (c, orig_type,
					   incr_vec[i].initializer);

      if (incr_vec[i].incr != cand_incr)
	{
	  gcc_assert (repl_code == PLUS_EXPR);
	  repl_code = MINUS_EXPR;
	}

      stmt_to_print = replace_rhs_if_not_dup (repl_code, basis_name, rhs2,
					      orig_code, orig_rhs1, orig_rhs2,
					      c);
    }

  /* Otherwise, the increment is one of -1, 0, and 1.  Replace
     with a subtract of the stride from the basis name, a copy
     from the basis name, or an add of the stride to the basis
     name, respectively.  It may be necessary to introduce a
     cast (or reuse an existing cast).  */
  else if (cand_incr == 1)
    {
      tree stride_type = TREE_TYPE (c->stride);
      tree orig_type = TREE_TYPE (orig_rhs2);

      if (types_compatible_p (orig_type, stride_type))
	rhs2 = c->stride;
      else
	rhs2 = introduce_cast_before_cand (c, orig_type, c->stride);

      stmt_to_print = replace_rhs_if_not_dup (repl_code, basis_name, rhs2,
					      orig_code, orig_rhs1, orig_rhs2,
					      c);
    }

  else if (cand_incr == -1)
    {
      tree stride_type = TREE_TYPE (c->stride);
      tree orig_type = TREE_TYPE (orig_rhs2);
      gcc_assert (repl_code != POINTER_PLUS_EXPR);

      if (types_compatible_p (orig_type, stride_type))
	rhs2 = c->stride;
      else
	rhs2 = introduce_cast_before_cand (c, orig_type, c->stride);

      if (orig_code != MINUS_EXPR
	  || !operand_equal_p (basis_name, orig_rhs1, 0)
	  || !operand_equal_p (rhs2, orig_rhs2, 0))
	{
	  gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
	  slsr_cand_t cc = lookup_cand (c->first_interp);
	  gimple_assign_set_rhs_with_ops (&gsi, MINUS_EXPR, basis_name, rhs2);
	  update_stmt (gsi_stmt (gsi));
	  while (cc)
	    {
	      cc->cand_stmt = gsi_stmt (gsi);
	      cc = lookup_cand (cc->next_interp);
	    }

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    stmt_to_print = gsi_stmt (gsi);
	}
      else if (dump_file && (dump_flags & TDF_DETAILS))
	fputs ("  (duplicate, not actually replacing)\n", dump_file);
    }

  else if (cand_incr == 0)
    {
      tree lhs = gimple_assign_lhs (c->cand_stmt);
      tree lhs_type = TREE_TYPE (lhs);
      tree basis_type = TREE_TYPE (basis_name);

      if (types_compatible_p (lhs_type, basis_type))
	{
	  gassign *copy_stmt = gimple_build_assign (lhs, basis_name);
	  gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
	  slsr_cand_t cc = lookup_cand (c->first_interp);
	  gimple_set_location (copy_stmt, gimple_location (c->cand_stmt));
	  gsi_replace (&gsi, copy_stmt, false);
	  while (cc)
	    {
	      cc->cand_stmt = copy_stmt;
	      cc = lookup_cand (cc->next_interp);
	    }

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    stmt_to_print = copy_stmt;
	}
      else
	{
	  gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
	  gassign *cast_stmt = gimple_build_assign (lhs, NOP_EXPR, basis_name);
	  slsr_cand_t cc = lookup_cand (c->first_interp);
	  gimple_set_location (cast_stmt, gimple_location (c->cand_stmt));
	  gsi_replace (&gsi, cast_stmt, false);
	  while (cc)
	    {
	      cc->cand_stmt = cast_stmt;
	      cc = lookup_cand (cc->next_interp);
	    }

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    stmt_to_print = cast_stmt;
	}
    }
  else
    gcc_unreachable ();

  if (dump_file && (dump_flags & TDF_DETAILS) && stmt_to_print)
    {
      fputs ("With: ", dump_file);
      print_gimple_stmt (dump_file, stmt_to_print, 0);
      fputs ("\n", dump_file);
    }
}
/* For each candidate in the tree rooted at C, replace it with
   an increment if such has been shown to be profitable.  */

static void
replace_profitable_candidates (slsr_cand_t c)
{
  if (!cand_already_replaced (c))
    {
      widest_int increment = cand_abs_increment (c);
      enum tree_code orig_code = gimple_assign_rhs_code (c->cand_stmt);
      int i;

      i = incr_vec_index (increment);

      /* Only process profitable increments.  Nothing useful can be done
	 to a cast or copy.  */
      if (i >= 0
	  && profitable_increment_p (i)
	  && orig_code != SSA_NAME
	  && !CONVERT_EXPR_CODE_P (orig_code))
	{
	  if (phi_dependent_cand_p (c))
	    {
	      gphi *phi = as_a <gphi *> (lookup_cand (c->def_phi)->cand_stmt);

	      if (all_phi_incrs_profitable (c, phi))
		{
		  /* Look up the LHS SSA name from C's basis.  This will be
		     the RHS1 of the adds we will introduce to create new
		     phi arguments.  */
		  slsr_cand_t basis = lookup_cand (c->basis);
		  tree basis_name = gimple_assign_lhs (basis->cand_stmt);

		  /* Create a new phi statement that will represent C's true
		     basis after the transformation is complete.  */
		  location_t loc = gimple_location (c->cand_stmt);
		  tree name = create_phi_basis (c, phi, basis_name,
						loc, UNKNOWN_STRIDE);

		  /* Replace C with an add of the new basis phi and the
		     increment.  */
		  replace_one_candidate (c, i, name);
		}
	    }
	  else
	    {
	      slsr_cand_t basis = lookup_cand (c->basis);
	      tree basis_name = gimple_assign_lhs (basis->cand_stmt);
	      replace_one_candidate (c, i, basis_name);
	    }
	}
    }

  if (c->sibling)
    replace_profitable_candidates (lookup_cand (c->sibling));

  if (c->dependent)
    replace_profitable_candidates (lookup_cand (c->dependent));
}
/* Analyze costs of related candidates in the candidate vector,
   and make beneficial replacements.  */
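/* For a root whose stride is an SSA name, the work below proceeds in four
   steps:  record_increments, analyze_increments, insert_initializers, and
   finally replace_profitable_candidates.  */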
static void
analyze_candidates_and_replace (void)
{
  unsigned i;
  slsr_cand_t c;

  /* Each candidate that has a null basis and a non-null
     dependent is the root of a tree of related statements.
     Analyze each tree to determine a subset of those
     statements that can be replaced with maximum benefit.

     Note the first NULL element is skipped.  */
  FOR_EACH_VEC_ELT_FROM (cand_vec, i, c, 1)
    {
      slsr_cand_t first_dep;

      if (c->basis != 0 || c->dependent == 0)
	continue;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "\nProcessing dependency tree rooted at %d.\n",
		 c->cand_num);

      first_dep = lookup_cand (c->dependent);

      /* If this is a chain of CAND_REFs, unconditionally replace
	 each of them with a strength-reduced data reference.  */
      if (c->kind == CAND_REF)
	replace_refs (c);

      /* If the common stride of all related candidates is a known
	 constant, each candidate without a phi-dependence can be
	 profitably replaced.  Each replaces a multiply by a single
	 add, with the possibility that a feeding add also goes dead.
	 A candidate with a phi-dependence is replaced only if the
	 compensation code it requires is offset by the strength
	 reduction savings.  */
      else if (TREE_CODE (c->stride) == INTEGER_CST)
	replace_uncond_cands_and_profitable_phis (first_dep);

      /* When the stride is an SSA name, it may still be profitable
	 to replace some or all of the dependent candidates, depending
	 on whether the introduced increments can be reused, or are
	 less expensive to calculate than the replaced statements.  */
      else
	{
	  machine_mode mode;
	  bool speed;

	  /* Determine whether we'll be generating pointer arithmetic
	     when replacing candidates.  */
	  address_arithmetic_p = (c->kind == CAND_ADD
				  && POINTER_TYPE_P (c->cand_type));

	  /* If all candidates have already been replaced under other
	     interpretations, nothing remains to be done.  */
	  if (!count_candidates (c))
	    continue;

	  /* Construct an array of increments for this candidate chain.  */
	  incr_vec = XNEWVEC (incr_info, MAX_INCR_VEC_LEN);
	  incr_vec_len = 0;
	  record_increments (c);

	  /* Determine which increments are profitable to replace.  */
	  mode = TYPE_MODE (TREE_TYPE (gimple_assign_lhs (c->cand_stmt)));
	  speed = optimize_cands_for_speed_p (c);
	  analyze_increments (first_dep, mode, speed);

	  /* Insert initializers of the form T_0 = stride * increment
	     for use in profitable replacements.  */
	  insert_initializers (first_dep);

	  /* Perform the replacements.  */
	  replace_profitable_candidates (first_dep);
	  free (incr_vec);
	}
    }

  /* For conditional candidates, we may have uncommitted insertions
     on edges to clean up.  */
  gsi_commit_edge_inserts ();
}
namespace {

const pass_data pass_data_strength_reduction =
{
  GIMPLE_PASS, /* type */
  "slsr", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_GIMPLE_SLSR, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_strength_reduction : public gimple_opt_pass
{
public:
  pass_strength_reduction (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_strength_reduction, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return flag_tree_slsr; }
  virtual unsigned int execute (function *);

}; // class pass_strength_reduction
unsigned
pass_strength_reduction::execute (function *fun)
{
  /* Create the obstack where candidates will reside.  */
  gcc_obstack_init (&cand_obstack);

  /* Allocate the candidate vector and initialize the first NULL element.  */
  cand_vec.create (128);
  cand_vec.safe_push (NULL);

  /* Allocate the mapping from statements to candidate indices.  */
  stmt_cand_map = new hash_map<gimple *, slsr_cand_t>;

  /* Create the obstack where candidate chains will reside.  */
  gcc_obstack_init (&chain_obstack);

  /* Allocate the mapping from base expressions to candidate chains.  */
  base_cand_map = new hash_table<cand_chain_hasher> (500);

  /* Allocate the mapping from bases to alternative bases.  */
  alt_base_map = new hash_map<tree, tree>;

  /* Initialize the loop optimizer.  We need to detect flow across
     back edges, and this gives us dominator information as well.  */
  loop_optimizer_init (AVOID_CFG_MODIFICATIONS);

  /* Walk the CFG in predominator order looking for strength reduction
     candidates.  */
  find_candidates_dom_walker (CDI_DOMINATORS)
    .walk (fun->cfg->x_entry_block_ptr);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_cand_vec ();
      dump_cand_chains ();
    }

  delete alt_base_map;
  free_affine_expand_cache (&name_expansions);

  /* Analyze costs and make appropriate replacements.  */
  analyze_candidates_and_replace ();

  loop_optimizer_finalize ();
  delete base_cand_map;
  base_cand_map = NULL;
  obstack_free (&chain_obstack, NULL);
  delete stmt_cand_map;
  cand_vec.release ();
  obstack_free (&cand_obstack, NULL);

  return 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_strength_reduction (gcc::context *ctxt)
{
  return new pass_strength_reduction (ctxt);
}