1 /* Straight-line strength reduction.
2 Copyright (C) 2012-2016 Free Software Foundation, Inc.
3 Contributed by Bill Schmidt, IBM <wschmidt@linux.ibm.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 /* There are many algorithms for performing strength reduction on
22 loops. This is not one of them. IVOPTS handles strength reduction
23 of induction variables just fine. This pass is intended to pick
24 up the crumbs it leaves behind, by considering opportunities for
25 strength reduction along dominator paths.
27 Strength reduction addresses explicit multiplies, and certain
28 multiplies implicit in addressing expressions. It would also be
29 possible to apply strength reduction to divisions and modulos,
30 but such opportunities are relatively uncommon.
32 Strength reduction is also currently restricted to integer operations.
33 If desired, it could be extended to floating-point operations under
34 control of something like -funsafe-math-optimizations. */
38 #include "coretypes.h"
44 #include "tree-pass.h"
47 #include "gimple-pretty-print.h"
48 #include "fold-const.h"
49 #include "gimple-iterator.h"
50 #include "gimplify-me.h"
51 #include "stor-layout.h"
56 #include "tree-ssa-address.h"
57 #include "tree-affine.h"
60 /* Information about a strength reduction candidate. Each statement
61 in the candidate table represents an expression of one of the
62 following forms (the special case of CAND_REF will be described
65 (CAND_MULT) S1: X = (B + i) * S
66 (CAND_ADD) S1: X = B + (i * S)
68 Here X and B are SSA names, i is an integer constant, and S is
69 either an SSA name or a constant. We call B the "base," i the
70 "index", and S the "stride."
72 Any statement S0 that dominates S1 and is of the form:
74 (CAND_MULT) S0: Y = (B + i') * S
75 (CAND_ADD) S0: Y = B + (i' * S)
77 is called a "basis" for S1. In both cases, S1 may be replaced by
79 S1': X = Y + (i - i') * S,
81 where (i - i') * S is folded to the extent possible.
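   As a small worked instance (illustrative values only): if S0 is
   Y = (B + 2) * S and S1 is X = (B + 7) * S, then i' = 2, i = 7, and
   S1 may be rewritten as S1': X = Y + 5 * S, so the multiply in S1
   becomes an add of the folded quantity 5 * S.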
83 All gimple statements are visited in dominator order, and each
84 statement that may contribute to one of the forms of S1 above is
85 given at least one entry in the candidate table. Such statements
86 include addition, pointer addition, subtraction, multiplication,
87 negation, copies, and nontrivial type casts. If a statement may
88 represent more than one expression of the forms of S1 above,
89 multiple "interpretations" are stored in the table and chained
92 * An add of two SSA names may treat either operand as the base.
93 * A multiply of two SSA names, likewise.
94 * A copy or cast may be thought of as either a CAND_MULT with
95 i = 0 and S = 1, or as a CAND_ADD with i = 0 or S = 0.
97 Candidate records are allocated from an obstack. They are addressed
98 both from a hash table keyed on S1, and from a vector of candidate
99 pointers arranged in predominator order.
Currently we don't recognize:

     S0: Y = (S * i') - B
     S1: X = (S * i) - B

as a strength reduction opportunity, even though this S1 would
109 also be replaceable by the S1' above. This can be added if it
110 comes up in practice.
112 Strength reduction in addressing
113 --------------------------------
114 There is another kind of candidate known as CAND_REF. A CAND_REF
115 describes a statement containing a memory reference having
116 complex addressing that might benefit from strength reduction.
117 Specifically, we are interested in references for which
118 get_inner_reference returns a base address, offset, and bitpos as
121 base: MEM_REF (T1, C1)
122 offset: MULT_EXPR (PLUS_EXPR (T2, C2), C3)
123 bitpos: C4 * BITS_PER_UNIT
125 Here T1 and T2 are arbitrary trees, and C1, C2, C3, C4 are
126 arbitrary integer constants. Note that C2 may be zero, in which
127 case the offset will be MULT_EXPR (T2, C3).
129 When this pattern is recognized, the original memory reference
130 can be replaced with:
MEM_REF (POINTER_PLUS_EXPR (T1, MULT_EXPR (T2, C3)),
         C1 + (C2 * C3) + C4)
135 which distributes the multiply to allow constant folding. When
136 two or more addressing expressions can be represented by MEM_REFs
137 of this form, differing only in the constants C1, C2, and C4,
138 making this substitution produces more efficient addressing during
139 the RTL phases. When there are not at least two expressions with
the same values of T1, T2, and C3, there is nothing to be gained
by the replacement.
143 Strength reduction of CAND_REFs uses the same infrastructure as
144 that used by CAND_MULTs and CAND_ADDs. We record T1 in the base (B)
145 field, MULT_EXPR (T2, C3) in the stride (S) field, and
146 C1 + (C2 * C3) + C4 in the index (i) field. A basis for a CAND_REF
147 is thus another CAND_REF with the same B and S values. When at
148 least two CAND_REFs are chained together using the basis relation,
149 each of them is replaced as above, resulting in improved code
150 generation for addressing.
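   As a source-level illustration only (hypothetical code, not taken
   from this pass), consider:

     struct pair { int a[100]; int b[100]; };

     void
     store_pair (struct pair *p, int i)
     {
       p->a[i + 2] = 1;   /* CAND_REF sharing T1 = p, same T2 and C3 */
       p->b[i + 2] = 2;   /* differs only in the constant components */
     }

   Assuming 4-byte ints, both stores share T1 = p, the same T2, and
   C3 = 4; they differ only in the constants C1, C2, and C4, so the
   scaled index computed for the first reference can be reused by the
   second, with the constants folding into the MEM_REF offsets.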
152 Conditional candidates
153 ======================
Conditional candidates are best illustrated with an example.
Consider the code sequence:

     (1)  x_0 = ...;
     (2)  a_0 = x_0 * 5;          MULT (B: x_0; i: 0; S: 5)
          if (...)
     (3)    x_1 = x_0 + 1;        ADD  (B: x_0, i: 1; S: 1)
     (4)  x_2 = PHI <x_0, x_1>;   PHI  (B: x_0, i: 0, S: 1)
     (5)  x_3 = x_2 + 1;          ADD  (B: x_2, i: 1, S: 1)
     (6)  a_1 = x_3 * 5;          MULT (B: x_2, i: 1; S: 5)

Here strength reduction is complicated by the uncertain value of x_2.
A legitimate transformation is:

     (1)  x_0 = ...;
     (2)  a_0 = x_0 * 5;          MULT (B: x_0; i: 0; S: 5)
          if (...)
            {
     (3)     [x_1 = x_0 + 1;]     ADD  (B: x_0, i: 1; S: 1)
     (3a)    t_1 = a_0 + 5;       ADD  (B: a_0, i: 5; S: 1)
            }
     (4)  [x_2 = PHI <x_0, x_1>;]
     (4a) t_2 = PHI <a_0, t_1>;
     (5)  [x_3 = x_2 + 1;]
     (6r) a_1 = t_2 + 5;

where the bracketed instructions may go dead.
183 To recognize this opportunity, we have to observe that statement (6)
184 has a "hidden basis" (2). The hidden basis is unlike a normal basis
185 in that the statement and the hidden basis have different base SSA
186 names (x_2 and x_0, respectively). The relationship is established
187 when a statement's base name (x_2) is defined by a phi statement (4),
188 each argument of which (x_0, x_1) has an identical "derived base name."
189 If the argument is defined by a candidate (as x_1 is by (3)) that is a
190 CAND_ADD having a stride of 1, the derived base name of the argument is
191 the base name of the candidate (x_0). Otherwise, the argument itself
192 is its derived base name (as is the case with argument x_0).
194 The hidden basis for statement (6) is the nearest dominating candidate
195 whose base name is the derived base name (x_0) of the feeding phi (4),
196 and whose stride is identical to that of the statement. We can then
197 create the new "phi basis" (4a) and feeding adds along incoming arcs (3a),
198 allowing the final replacement of (6) by the strength-reduced (6r).
200 To facilitate this, a new kind of candidate (CAND_PHI) is introduced.
201 A CAND_PHI is not a candidate for replacement, but is maintained in the
202 candidate table to ease discovery of hidden bases. Any phi statement
203 whose arguments share a common derived base name is entered into the
204 table with the derived base name, an (arbitrary) index of zero, and a
205 stride of 1. A statement with a hidden basis can then be detected by
206 simply looking up its feeding phi definition in the candidate table,
207 extracting the derived base name, and searching for a basis in the
208 usual manner after substituting the derived base name.
210 Note that the transformation is only valid when the original phi and
211 the statements that define the phi's arguments are all at the same
212 position in the loop hierarchy. */
215 /* Index into the candidate vector, offset by 1. VECs are zero-based,
216 while cand_idx's are one-based, with zero indicating null. */
typedef unsigned cand_idx;
219 /* The kind of candidate. */
230 /* The candidate statement S1. */
233 /* The base expression B: often an SSA name, but not always. */
239 /* The index constant i. */
242 /* The type of the candidate. This is normally the type of base_expr,
243 but casts may have occurred when combining feeding instructions.
244 A candidate can only be a basis for candidates of the same final type.
245 (For CAND_REFs, this is the type to be used for operand 1 of the
246 replacement MEM_REF.) */
249 /* The type to be used to interpret the stride field when the stride
250 is not a constant. Normally the same as the type of the recorded
251 stride, but when the stride has been cast we need to maintain that
252 knowledge in order to make legal substitutions without losing
253 precision. When the stride is a constant, this will be sizetype. */
256 /* The kind of candidate (CAND_MULT, etc.). */
259 /* Index of this candidate in the candidate vector. */
262 /* Index of the next candidate record for the same statement.
263 A statement may be useful in more than one way (e.g., due to
commutativity).  So we can have multiple "interpretations"
of a statement.  */
cand_idx next_interp;
268 /* Index of the basis statement S0, if any, in the candidate vector. */
271 /* First candidate for which this candidate is a basis, if one exists. */
274 /* Next candidate having the same basis as this one. */
277 /* If this is a conditional candidate, the CAND_PHI candidate
278 that defines the base SSA name B. */
281 /* Savings that can be expected from eliminating dead code if this
282 candidate is replaced. */
typedef struct slsr_cand_d slsr_cand, *slsr_cand_t;
typedef const struct slsr_cand_d *const_slsr_cand_t;
289 /* Pointers to candidates are chained together as part of a mapping
290 from base expressions to the candidates that use them. */
294 /* Base expression for the chain of candidates: often, but not
295 always, an SSA name. */
298 /* Pointer to a candidate. */
struct cand_chain_d *next;

typedef struct cand_chain_d cand_chain, *cand_chain_t;
typedef const struct cand_chain_d *const_cand_chain_t;
309 /* Information about a unique "increment" associated with candidates
310 having an SSA name for a stride. An increment is the difference
311 between the index of the candidate and the index of its basis,
312 i.e., (i - i') as discussed in the module commentary.
314 When we are not going to generate address arithmetic we treat
315 increments that differ only in sign as the same, allowing sharing
316 of the cost of initializers. The absolute value of the increment
317 is stored in the incr_info. */
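/* A small worked instance (illustrative values only): if candidates
   X1 = B + 7 * S and X2 = B + (-3) * S both have basis Y = B + 2 * S,
   their increments are 5 and -5.  When address arithmetic will not be
   generated, both share the single incr_info for the absolute value 5,
   so the cost of any initializer T_0 = 5 * S is counted once.  */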
321 /* The increment that relates a candidate to its basis. */
324 /* How many times the increment occurs in the candidate tree. */
327 /* Cost of replacing candidates using this increment. Negative and
328 zero costs indicate replacement should be performed. */
331 /* If this increment is profitable but is not -1, 0, or 1, it requires
332 an initializer T_0 = stride * incr to be found or introduced in the
333 nearest common dominator of all candidates. This field holds T_0
334 for subsequent use. */
337 /* If the initializer was found to already exist, this is the block
338 where it was found. */
typedef struct incr_info_d incr_info, *incr_info_t;
344 /* Candidates are maintained in a vector. If candidate X dominates
345 candidate Y, then X appears before Y in the vector; but the
346 converse does not necessarily hold. */
static vec<slsr_cand_t> cand_vec;
361 enum phi_adjust_status
367 enum count_phis_status
373 /* Pointer map embodying a mapping from statements to candidates. */
static hash_map<gimple *, slsr_cand_t> *stmt_cand_map;
376 /* Obstack for candidates. */
static struct obstack cand_obstack;
379 /* Obstack for candidate chains. */
static struct obstack chain_obstack;
382 /* An array INCR_VEC of incr_infos is used during analysis of related
383 candidates having an SSA name for a stride. INCR_VEC_LEN describes
384 its current length. MAX_INCR_VEC_LEN is used to avoid costly
385 pathological cases. */
static incr_info_t incr_vec;
static unsigned incr_vec_len;
const int MAX_INCR_VEC_LEN = 16;
390 /* For a chain of candidates with unknown stride, indicates whether or not
391 we must generate pointer arithmetic when replacing statements. */
static bool address_arithmetic_p;
394 /* Forward function declarations. */
static slsr_cand_t base_cand_from_table (tree);
static tree introduce_cast_before_cand (slsr_cand_t, tree, tree);
static bool legal_cast_p_1 (tree, tree);
399 /* Produce a pointer to the IDX'th candidate in the candidate vector. */
lookup_cand (cand_idx idx)
  return cand_vec[idx - 1];
407 /* Helper for hashing a candidate chain header. */
struct cand_chain_hasher : nofree_ptr_hash <cand_chain>
{
  static inline hashval_t hash (const cand_chain *);
  static inline bool equal (const cand_chain *, const cand_chain *);
};

inline hashval_t
cand_chain_hasher::hash (const cand_chain *p)
{
  tree base_expr = p->base_expr;
  return iterative_hash_expr (base_expr, 0);
}

inline bool
cand_chain_hasher::equal (const cand_chain *chain1, const cand_chain *chain2)
{
  return operand_equal_p (chain1->base_expr, chain2->base_expr, 0);
}
428 /* Hash table embodying a mapping from base exprs to chains of candidates. */
static hash_table<cand_chain_hasher> *base_cand_map;
431 /* Pointer map used by tree_to_aff_combination_expand. */
static hash_map<tree, name_expansion *> *name_expansions;

/* Pointer map embodying a mapping from bases to alternative bases.  */
static hash_map<tree, tree> *alt_base_map;
/* Given BASE, use the tree affine combination facilities to
437 find the underlying tree expression for BASE, with any
438 immediate offset excluded.
440 N.B. we should eliminate this backtracking with better forward
441 analysis in a future release. */
get_alternative_base (tree base)
  tree *result = alt_base_map->get (base);

  tree_to_aff_combination_expand (base, TREE_TYPE (base),
				  &aff, &name_expansions);
  expr = aff_combination_to_tree (&aff);

  gcc_assert (!alt_base_map->put (base, base == expr ? NULL : expr));

  return expr == base ? NULL : expr;
466 /* Look in the candidate table for a CAND_PHI that defines BASE and
467 return it if found; otherwise return NULL. */
find_phi_def (tree base)
  if (TREE_CODE (base) != SSA_NAME)

  c = base_cand_from_table (base);

  if (!c || c->kind != CAND_PHI)
485 /* Helper routine for find_basis_for_candidate. May be called twice:
486 once for the candidate's base expr, and optionally again either for
the candidate's phi definition or for a CAND_REF's alternative base
expr.  */
find_basis_for_base_expr (slsr_cand_t c, tree base_expr)
  cand_chain mapping_key;
  slsr_cand_t basis = NULL;

  // Limit potential of N^2 behavior for long candidate chains.
  int max_iters = PARAM_VALUE (PARAM_MAX_SLSR_CANDIDATE_SCAN);

  mapping_key.base_expr = base_expr;
  chain = base_cand_map->find (&mapping_key);

  for (; chain && iters < max_iters; chain = chain->next, ++iters)
      slsr_cand_t one_basis = chain->cand;

      if (one_basis->kind != c->kind
	  || one_basis->cand_stmt == c->cand_stmt
	  || !operand_equal_p (one_basis->stride, c->stride, 0)
	  || !types_compatible_p (one_basis->cand_type, c->cand_type)
	  || !types_compatible_p (one_basis->stride_type, c->stride_type)
	  || !dominated_by_p (CDI_DOMINATORS,
			      gimple_bb (c->cand_stmt),
			      gimple_bb (one_basis->cand_stmt)))

      if (!basis || basis->cand_num < one_basis->cand_num)
525 /* Use the base expr from candidate C to look for possible candidates
526 that can serve as a basis for C. Each potential basis must also
527 appear in a block that dominates the candidate statement and have
528 the same stride and type. If more than one possible basis exists,
529 the one with highest index in the vector is chosen; this will be
530 the most immediately dominating basis. */
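/* A small worked instance (illustrative numbering only): if candidates
   4 and 9 both dominate candidate 12 and agree with its base expression,
   stride, and type, candidate 9 is selected as the basis, since the
   higher candidate number corresponds to the most immediately
   dominating statement.  */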
find_basis_for_candidate (slsr_cand_t c)
  slsr_cand_t basis = find_basis_for_base_expr (c, c->base_expr);

  /* If a candidate doesn't have a basis using its base expression,
     it may have a basis hidden by one or more intervening phis.  */
  if (!basis && c->def_phi)
      basic_block basis_bb, phi_bb;
      slsr_cand_t phi_cand = lookup_cand (c->def_phi);
      basis = find_basis_for_base_expr (c, phi_cand->base_expr);

      /* A hidden basis must dominate the phi-definition of the
	 candidate's base name.  */
      phi_bb = gimple_bb (phi_cand->cand_stmt);
      basis_bb = gimple_bb (basis->cand_stmt);

      if (phi_bb == basis_bb
	  || !dominated_by_p (CDI_DOMINATORS, phi_bb, basis_bb))

      /* If we found a hidden basis, estimate additional dead-code
	 savings if the phi and its feeding statements can be removed.  */
      if (basis && has_single_use (gimple_phi_result (phi_cand->cand_stmt)))
	c->dead_savings += phi_cand->dead_savings;

  if (flag_expensive_optimizations && !basis && c->kind == CAND_REF)
      tree alt_base_expr = get_alternative_base (c->base_expr);
      basis = find_basis_for_base_expr (c, alt_base_expr);

      c->sibling = basis->dependent;
      basis->dependent = c->cand_num;
      return basis->cand_num;
583 /* Record a mapping from BASE to C, indicating that C may potentially serve
584 as a basis using that base expression. BASE may be the same as
C->BASE_EXPR; alternatively BASE can be a different tree that shares the
underlying expression of C->BASE_EXPR.  */
record_potential_basis (slsr_cand_t c, tree base)
  node = (cand_chain_t) obstack_alloc (&chain_obstack, sizeof (cand_chain));
  node->base_expr = base;

  slot = base_cand_map->find_slot (node, INSERT);

      cand_chain_t head = (cand_chain_t) (*slot);
      node->next = head->next;
612 /* Allocate storage for a new candidate and initialize its fields.
613 Attempt to find a basis for the candidate.
615 For CAND_REF, an alternative base may also be recorded and used
616 to find a basis. This helps cases where the expression hidden
behind BASE (which is usually an SSA_NAME) has an immediate offset,
e.g.:

  a2[i + 20][j] = 2;  */
alloc_cand_and_find_basis (enum cand_kind kind, gimple *gs, tree base,
			   const widest_int &index, tree stride, tree ctype,
			   tree stype, unsigned savings)
628 slsr_cand_t c
= (slsr_cand_t
) obstack_alloc (&cand_obstack
,
634 c
->cand_type
= ctype
;
635 c
->stride_type
= stype
;
637 c
->cand_num
= cand_vec
.length () + 1;
641 c
->def_phi
= kind
== CAND_MULT
? find_phi_def (base
) : 0;
642 c
->dead_savings
= savings
;
644 cand_vec
.safe_push (c
);
646 if (kind
== CAND_PHI
)
649 c
->basis
= find_basis_for_candidate (c
);
651 record_potential_basis (c
, base
);
652 if (flag_expensive_optimizations
&& kind
== CAND_REF
)
654 tree alt_base
= get_alternative_base (base
);
656 record_potential_basis (c
, alt_base
);
/* Determine the target cost of statement GS when compiling according
   to SPEED.  */
666 stmt_cost (gimple
*gs
, bool speed
)
668 tree lhs
, rhs1
, rhs2
;
669 machine_mode lhs_mode
;
671 gcc_assert (is_gimple_assign (gs
));
672 lhs
= gimple_assign_lhs (gs
);
673 rhs1
= gimple_assign_rhs1 (gs
);
674 lhs_mode
= TYPE_MODE (TREE_TYPE (lhs
));
676 switch (gimple_assign_rhs_code (gs
))
679 rhs2
= gimple_assign_rhs2 (gs
);
681 if (tree_fits_shwi_p (rhs2
))
682 return mult_by_coeff_cost (tree_to_shwi (rhs2
), lhs_mode
, speed
);
684 gcc_assert (TREE_CODE (rhs1
) != INTEGER_CST
);
685 return mul_cost (speed
, lhs_mode
);
688 case POINTER_PLUS_EXPR
:
690 return add_cost (speed
, lhs_mode
);
693 return neg_cost (speed
, lhs_mode
);
696 return convert_cost (lhs_mode
, TYPE_MODE (TREE_TYPE (rhs1
)), speed
);
/* Note that we don't assign costs to copies that in most cases
   will go away.  */
711 /* Look up the defining statement for BASE_IN and return a pointer
712 to its candidate in the candidate table, if any; otherwise NULL.
713 Only CAND_ADD and CAND_MULT candidates are returned. */
base_cand_from_table (tree base_in)
  gimple *def = SSA_NAME_DEF_STMT (base_in);

    return (slsr_cand_t) NULL;

  result = stmt_cand_map->get (def);

  if (result && (*result)->kind != CAND_REF)

  return (slsr_cand_t) NULL;
732 /* Add an entry to the statement-to-candidate mapping. */
add_cand_for_stmt (gimple *gs, slsr_cand_t c)
  gcc_assert (!stmt_cand_map->put (gs, c));
740 /* Given PHI which contains a phi statement, determine whether it
741 satisfies all the requirements of a phi candidate. If so, create
742 a candidate. Note that a CAND_PHI never has a basis itself, but
743 is used to help find a basis for subsequent candidates. */
746 slsr_process_phi (gphi
*phi
, bool speed
)
749 tree arg0_base
= NULL_TREE
, base_type
;
751 struct loop
*cand_loop
= gimple_bb (phi
)->loop_father
;
752 unsigned savings
= 0;
754 /* A CAND_PHI requires each of its arguments to have the same
755 derived base name. (See the module header commentary for a
756 definition of derived base names.) Furthermore, all feeding
definitions must be in the same position in the loop hierarchy
as this phi.  */
760 for (i
= 0; i
< gimple_phi_num_args (phi
); i
++)
762 slsr_cand_t arg_cand
;
763 tree arg
= gimple_phi_arg_def (phi
, i
);
764 tree derived_base_name
= NULL_TREE
;
765 gimple
*arg_stmt
= NULL
;
766 basic_block arg_bb
= NULL
;
768 if (TREE_CODE (arg
) != SSA_NAME
)
771 arg_cand
= base_cand_from_table (arg
);
775 while (arg_cand
->kind
!= CAND_ADD
&& arg_cand
->kind
!= CAND_PHI
)
777 if (!arg_cand
->next_interp
)
780 arg_cand
= lookup_cand (arg_cand
->next_interp
);
783 if (!integer_onep (arg_cand
->stride
))
786 derived_base_name
= arg_cand
->base_expr
;
787 arg_stmt
= arg_cand
->cand_stmt
;
788 arg_bb
= gimple_bb (arg_stmt
);
790 /* Gather potential dead code savings if the phi statement
791 can be removed later on. */
792 if (has_single_use (arg
))
794 if (gimple_code (arg_stmt
) == GIMPLE_PHI
)
795 savings
+= arg_cand
->dead_savings
;
797 savings
+= stmt_cost (arg_stmt
, speed
);
800 else if (SSA_NAME_IS_DEFAULT_DEF (arg
))
802 derived_base_name
= arg
;
803 arg_bb
= single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun
));
806 if (!arg_bb
|| arg_bb
->loop_father
!= cand_loop
)
810 arg0_base
= derived_base_name
;
811 else if (!operand_equal_p (derived_base_name
, arg0_base
, 0))
815 /* Create the candidate. "alloc_cand_and_find_basis" is named
misleadingly for this case, as no basis will be sought for a CAND_PHI.  */
818 base_type
= TREE_TYPE (arg0_base
);
820 c
= alloc_cand_and_find_basis (CAND_PHI
, phi
, arg0_base
,
821 0, integer_one_node
, base_type
,
824 /* Add the candidate to the statement-candidate mapping. */
825 add_cand_for_stmt (phi
, c
);
828 /* Given PBASE which is a pointer to tree, look up the defining
statement for it and check whether the candidate is in the
form of:

  X = B + (1 * S), S is integer constant
  X = B + (i * S), S is integer one

If so, set PBASE to the candidate's base_expr and return (i * S);
otherwise, just return zero.  */
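/* For illustration only (hypothetical names): if *PBASE is T and T was
   recorded as the CAND_ADD T = B + (1 * 8), then *PBASE is reset to B
   and 8 is returned, so that the caller can fold the extra 8 * C3
   bytes into the index of the restructured reference.  */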
840 backtrace_base_for_ref (tree
*pbase
)
842 tree base_in
= *pbase
;
843 slsr_cand_t base_cand
;
845 STRIP_NOPS (base_in
);
847 /* Strip off widening conversion(s) to handle cases where
e.g. 'B' is widened from an 'int' in order to calculate
a 64-bit address.  */
850 if (CONVERT_EXPR_P (base_in
)
851 && legal_cast_p_1 (TREE_TYPE (base_in
),
852 TREE_TYPE (TREE_OPERAND (base_in
, 0))))
853 base_in
= get_unwidened (base_in
, NULL_TREE
);
855 if (TREE_CODE (base_in
) != SSA_NAME
)
858 base_cand
= base_cand_from_table (base_in
);
860 while (base_cand
&& base_cand
->kind
!= CAND_PHI
)
862 if (base_cand
->kind
== CAND_ADD
863 && base_cand
->index
== 1
864 && TREE_CODE (base_cand
->stride
) == INTEGER_CST
)
866 /* X = B + (1 * S), S is integer constant. */
867 *pbase
= base_cand
->base_expr
;
868 return wi::to_widest (base_cand
->stride
);
870 else if (base_cand
->kind
== CAND_ADD
871 && TREE_CODE (base_cand
->stride
) == INTEGER_CST
872 && integer_onep (base_cand
->stride
))
874 /* X = B + (i * S), S is integer one. */
875 *pbase
= base_cand
->base_expr
;
876 return base_cand
->index
;
879 if (base_cand
->next_interp
)
880 base_cand
= lookup_cand (base_cand
->next_interp
);
/* Look for the following pattern:

    *PBASE:    MEM_REF (T1, C1)

    *POFFSET:  MULT_EXPR (T2, C3)            [C2 is zero]
               or
               MULT_EXPR (PLUS_EXPR (T2, C2), C3)
               or
               MULT_EXPR (MINUS_EXPR (T2, -C2), C3)

    *PINDEX:   C4 * BITS_PER_UNIT

   If not present, leave the input values unchanged and return FALSE.
   Otherwise, modify the input values as follows and return TRUE:

    *PBASE:    T1
    *POFFSET:  MULT_EXPR (T2, C3)
    *PINDEX:   C1 + (C2 * C3) + C4

   When T2 is recorded by a CAND_ADD in the form of (T2' + C5), it
   will be further restructured to:

    *PBASE:    T1
    *POFFSET:  MULT_EXPR (T2', C3)
    *PINDEX:   C1 + (C2 * C3) + C4 + (C5 * C3)  */
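/* A worked instance with illustrative constants only: for
   *PBASE = MEM_REF (T1, 4), *POFFSET = MULT_EXPR (PLUS_EXPR (T2, 2), 8),
   and *PINDEX = 8 * BITS_PER_UNIT, we have C1 = 4, C2 = 2, C3 = 8, and
   C4 = 8, so the outputs are *PBASE = T1, *POFFSET = MULT_EXPR (T2, 8),
   and *PINDEX = 4 + (2 * 8) + 8 = 28, absent any further C5
   restructuring.  */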
915 restructure_reference (tree
*pbase
, tree
*poffset
, widest_int
*pindex
,
918 tree base
= *pbase
, offset
= *poffset
;
919 widest_int index
= *pindex
;
920 tree mult_op0
, t1
, t2
, type
;
921 widest_int c1
, c2
, c3
, c4
, c5
;
925 || TREE_CODE (base
) != MEM_REF
926 || TREE_CODE (offset
) != MULT_EXPR
927 || TREE_CODE (TREE_OPERAND (offset
, 1)) != INTEGER_CST
928 || wi::umod_floor (index
, BITS_PER_UNIT
) != 0)
931 t1
= TREE_OPERAND (base
, 0);
932 c1
= widest_int::from (mem_ref_offset (base
), SIGNED
);
933 type
= TREE_TYPE (TREE_OPERAND (base
, 1));
935 mult_op0
= TREE_OPERAND (offset
, 0);
936 c3
= wi::to_widest (TREE_OPERAND (offset
, 1));
938 if (TREE_CODE (mult_op0
) == PLUS_EXPR
)
940 if (TREE_CODE (TREE_OPERAND (mult_op0
, 1)) == INTEGER_CST
)
942 t2
= TREE_OPERAND (mult_op0
, 0);
943 c2
= wi::to_widest (TREE_OPERAND (mult_op0
, 1));
948 else if (TREE_CODE (mult_op0
) == MINUS_EXPR
)
950 if (TREE_CODE (TREE_OPERAND (mult_op0
, 1)) == INTEGER_CST
)
952 t2
= TREE_OPERAND (mult_op0
, 0);
953 c2
= -wi::to_widest (TREE_OPERAND (mult_op0
, 1));
964 c4
= index
>> LOG2_BITS_PER_UNIT
;
965 c5
= backtrace_base_for_ref (&t2
);
968 *poffset
= fold_build2 (MULT_EXPR
, sizetype
, fold_convert (sizetype
, t2
),
969 wide_int_to_tree (sizetype
, c3
));
970 *pindex
= c1
+ c2
* c3
+ c4
+ c5
* c3
;
976 /* Given GS which contains a data reference, create a CAND_REF entry in
977 the candidate table and attempt to find a basis. */
980 slsr_process_ref (gimple
*gs
)
982 tree ref_expr
, base
, offset
, type
;
983 HOST_WIDE_INT bitsize
, bitpos
;
985 int unsignedp
, reversep
, volatilep
;
988 if (gimple_vdef (gs
))
989 ref_expr
= gimple_assign_lhs (gs
);
991 ref_expr
= gimple_assign_rhs1 (gs
);
993 if (!handled_component_p (ref_expr
)
994 || TREE_CODE (ref_expr
) == BIT_FIELD_REF
995 || (TREE_CODE (ref_expr
) == COMPONENT_REF
996 && DECL_BIT_FIELD (TREE_OPERAND (ref_expr
, 1))))
999 base
= get_inner_reference (ref_expr
, &bitsize
, &bitpos
, &offset
, &mode
,
1000 &unsignedp
, &reversep
, &volatilep
);
1003 widest_int index
= bitpos
;
1005 if (!restructure_reference (&base
, &offset
, &index
, &type
))
1008 c
= alloc_cand_and_find_basis (CAND_REF
, gs
, base
, index
, offset
,
1011 /* Add the candidate to the statement-candidate mapping. */
1012 add_cand_for_stmt (gs
, c
);
1015 /* Create a candidate entry for a statement GS, where GS multiplies
1016 two SSA names BASE_IN and STRIDE_IN. Propagate any known information
about the two SSA names into the new candidate.  Return the new
candidate.  */
1021 create_mul_ssa_cand (gimple
*gs
, tree base_in
, tree stride_in
, bool speed
)
1023 tree base
= NULL_TREE
, stride
= NULL_TREE
, ctype
= NULL_TREE
;
1024 tree stype
= NULL_TREE
;
1026 unsigned savings
= 0;
1028 slsr_cand_t base_cand
= base_cand_from_table (base_in
);
1030 /* Look at all interpretations of the base candidate, if necessary,
1031 to find information to propagate into this candidate. */
1032 while (base_cand
&& !base
&& base_cand
->kind
!= CAND_PHI
)
1035 if (base_cand
->kind
== CAND_MULT
&& integer_onep (base_cand
->stride
))
1041 base
= base_cand
->base_expr
;
1042 index
= base_cand
->index
;
1044 ctype
= base_cand
->cand_type
;
1045 stype
= TREE_TYPE (stride_in
);
1046 if (has_single_use (base_in
))
1047 savings
= (base_cand
->dead_savings
1048 + stmt_cost (base_cand
->cand_stmt
, speed
));
1050 else if (base_cand
->kind
== CAND_ADD
1051 && TREE_CODE (base_cand
->stride
) == INTEGER_CST
)
1053 /* Y = B + (i' * S), S constant
1055 ============================
1056 X = B + ((i' * S) * Z) */
1057 base
= base_cand
->base_expr
;
1058 index
= base_cand
->index
* wi::to_widest (base_cand
->stride
);
1060 ctype
= base_cand
->cand_type
;
1061 stype
= TREE_TYPE (stride_in
);
1062 if (has_single_use (base_in
))
1063 savings
= (base_cand
->dead_savings
1064 + stmt_cost (base_cand
->cand_stmt
, speed
));
1067 if (base_cand
->next_interp
)
1068 base_cand
= lookup_cand (base_cand
->next_interp
);
1075 /* No interpretations had anything useful to propagate, so
1076 produce X = (Y + 0) * Z. */
1080 ctype
= TREE_TYPE (base_in
);
1081 stype
= TREE_TYPE (stride_in
);
1084 c
= alloc_cand_and_find_basis (CAND_MULT
, gs
, base
, index
, stride
,
1085 ctype
, stype
, savings
);
1089 /* Create a candidate entry for a statement GS, where GS multiplies
1090 SSA name BASE_IN by constant STRIDE_IN. Propagate any known
information about BASE_IN into the new candidate.  Return the new
candidate.  */
1095 create_mul_imm_cand (gimple
*gs
, tree base_in
, tree stride_in
, bool speed
)
1097 tree base
= NULL_TREE
, stride
= NULL_TREE
, ctype
= NULL_TREE
;
1098 widest_int index
, temp
;
1099 unsigned savings
= 0;
1101 slsr_cand_t base_cand
= base_cand_from_table (base_in
);
1103 /* Look at all interpretations of the base candidate, if necessary,
1104 to find information to propagate into this candidate. */
1105 while (base_cand
&& !base
&& base_cand
->kind
!= CAND_PHI
)
1107 if (base_cand
->kind
== CAND_MULT
1108 && TREE_CODE (base_cand
->stride
) == INTEGER_CST
)
1110 /* Y = (B + i') * S, S constant
1112 ============================
1113 X = (B + i') * (S * c) */
1114 temp
= wi::to_widest (base_cand
->stride
) * wi::to_widest (stride_in
);
1115 if (wi::fits_to_tree_p (temp
, TREE_TYPE (stride_in
)))
1117 base
= base_cand
->base_expr
;
1118 index
= base_cand
->index
;
1119 stride
= wide_int_to_tree (TREE_TYPE (stride_in
), temp
);
1120 ctype
= base_cand
->cand_type
;
1121 if (has_single_use (base_in
))
1122 savings
= (base_cand
->dead_savings
1123 + stmt_cost (base_cand
->cand_stmt
, speed
));
1126 else if (base_cand
->kind
== CAND_ADD
&& integer_onep (base_cand
->stride
))
1130 ===========================
1132 base
= base_cand
->base_expr
;
1133 index
= base_cand
->index
;
1135 ctype
= base_cand
->cand_type
;
1136 if (has_single_use (base_in
))
1137 savings
= (base_cand
->dead_savings
1138 + stmt_cost (base_cand
->cand_stmt
, speed
));
1140 else if (base_cand
->kind
== CAND_ADD
1141 && base_cand
->index
== 1
1142 && TREE_CODE (base_cand
->stride
) == INTEGER_CST
)
1144 /* Y = B + (1 * S), S constant
1146 ===========================
1148 base
= base_cand
->base_expr
;
1149 index
= wi::to_widest (base_cand
->stride
);
1151 ctype
= base_cand
->cand_type
;
1152 if (has_single_use (base_in
))
1153 savings
= (base_cand
->dead_savings
1154 + stmt_cost (base_cand
->cand_stmt
, speed
));
1157 if (base_cand
->next_interp
)
1158 base_cand
= lookup_cand (base_cand
->next_interp
);
1165 /* No interpretations had anything useful to propagate, so
1166 produce X = (Y + 0) * c. */
1170 ctype
= TREE_TYPE (base_in
);
1173 c
= alloc_cand_and_find_basis (CAND_MULT
, gs
, base
, index
, stride
,
1174 ctype
, sizetype
, savings
);
1178 /* Given GS which is a multiply of scalar integers, make an appropriate
1179 entry in the candidate table. If this is a multiply of two SSA names,
1180 create two CAND_MULT interpretations and attempt to find a basis for
each of them.  Otherwise, create a single CAND_MULT and attempt to
find a basis for it.  */
1185 slsr_process_mul (gimple
*gs
, tree rhs1
, tree rhs2
, bool speed
)
1189 /* If this is a multiply of an SSA name with itself, it is highly
1190 unlikely that we will get a strength reduction opportunity, so
1191 don't record it as a candidate. This simplifies the logic for
1192 finding a basis, so if this is removed that must be considered. */
1196 if (TREE_CODE (rhs2
) == SSA_NAME
)
1198 /* Record an interpretation of this statement in the candidate table
1199 assuming RHS1 is the base expression and RHS2 is the stride. */
1200 c
= create_mul_ssa_cand (gs
, rhs1
, rhs2
, speed
);
1202 /* Add the first interpretation to the statement-candidate mapping. */
1203 add_cand_for_stmt (gs
, c
);
1205 /* Record another interpretation of this statement assuming RHS1
1206 is the stride and RHS2 is the base expression. */
1207 c2
= create_mul_ssa_cand (gs
, rhs2
, rhs1
, speed
);
1208 c
->next_interp
= c2
->cand_num
;
1212 /* Record an interpretation for the multiply-immediate. */
1213 c
= create_mul_imm_cand (gs
, rhs1
, rhs2
, speed
);
1215 /* Add the interpretation to the statement-candidate mapping. */
1216 add_cand_for_stmt (gs
, c
);
1220 /* Create a candidate entry for a statement GS, where GS adds two
1221 SSA names BASE_IN and ADDEND_IN if SUBTRACT_P is false, and
1222 subtracts ADDEND_IN from BASE_IN otherwise. Propagate any known
1223 information about the two SSA names into the new candidate.
1224 Return the new candidate. */
1227 create_add_ssa_cand (gimple
*gs
, tree base_in
, tree addend_in
,
1228 bool subtract_p
, bool speed
)
1230 tree base
= NULL_TREE
, stride
= NULL_TREE
, ctype
= NULL_TREE
;
1231 tree stype
= NULL_TREE
;
1233 unsigned savings
= 0;
1235 slsr_cand_t base_cand
= base_cand_from_table (base_in
);
1236 slsr_cand_t addend_cand
= base_cand_from_table (addend_in
);
1238 /* The most useful transformation is a multiply-immediate feeding
1239 an add or subtract. Look for that first. */
1240 while (addend_cand
&& !base
&& addend_cand
->kind
!= CAND_PHI
)
1242 if (addend_cand
->kind
== CAND_MULT
1243 && addend_cand
->index
== 0
1244 && TREE_CODE (addend_cand
->stride
) == INTEGER_CST
)
1246 /* Z = (B + 0) * S, S constant
1248 ===========================
1249 X = Y + ((+/-1 * S) * B) */
1251 index
= wi::to_widest (addend_cand
->stride
);
1254 stride
= addend_cand
->base_expr
;
1255 ctype
= TREE_TYPE (base_in
);
1256 stype
= addend_cand
->cand_type
;
1257 if (has_single_use (addend_in
))
1258 savings
= (addend_cand
->dead_savings
1259 + stmt_cost (addend_cand
->cand_stmt
, speed
));
1262 if (addend_cand
->next_interp
)
1263 addend_cand
= lookup_cand (addend_cand
->next_interp
);
1268 while (base_cand
&& !base
&& base_cand
->kind
!= CAND_PHI
)
1270 if (base_cand
->kind
== CAND_ADD
1271 && (base_cand
->index
== 0
1272 || operand_equal_p (base_cand
->stride
,
1273 integer_zero_node
, 0)))
1275 /* Y = B + (i' * S), i' * S = 0
1277 ============================
1278 X = B + (+/-1 * Z) */
1279 base
= base_cand
->base_expr
;
1280 index
= subtract_p
? -1 : 1;
1282 ctype
= base_cand
->cand_type
;
1283 stype
= (TREE_CODE (addend_in
) == INTEGER_CST
? sizetype
1284 : TREE_TYPE (addend_in
));
1285 if (has_single_use (base_in
))
1286 savings
= (base_cand
->dead_savings
1287 + stmt_cost (base_cand
->cand_stmt
, speed
));
1289 else if (subtract_p
)
1291 slsr_cand_t subtrahend_cand
= base_cand_from_table (addend_in
);
1293 while (subtrahend_cand
&& !base
&& subtrahend_cand
->kind
!= CAND_PHI
)
1295 if (subtrahend_cand
->kind
== CAND_MULT
1296 && subtrahend_cand
->index
== 0
1297 && TREE_CODE (subtrahend_cand
->stride
) == INTEGER_CST
)
1299 /* Z = (B + 0) * S, S constant
1301 ===========================
1302 Value: X = Y + ((-1 * S) * B) */
1304 index
= wi::to_widest (subtrahend_cand
->stride
);
1306 stride
= subtrahend_cand
->base_expr
;
1307 ctype
= TREE_TYPE (base_in
);
1308 stype
= subtrahend_cand
->cand_type
;
1309 if (has_single_use (addend_in
))
1310 savings
= (subtrahend_cand
->dead_savings
1311 + stmt_cost (subtrahend_cand
->cand_stmt
, speed
));
1314 if (subtrahend_cand
->next_interp
)
1315 subtrahend_cand
= lookup_cand (subtrahend_cand
->next_interp
);
1317 subtrahend_cand
= NULL
;
1321 if (base_cand
->next_interp
)
1322 base_cand
= lookup_cand (base_cand
->next_interp
);
1329 /* No interpretations had anything useful to propagate, so
1330 produce X = Y + (1 * Z). */
1332 index
= subtract_p
? -1 : 1;
1334 ctype
= TREE_TYPE (base_in
);
1335 stype
= (TREE_CODE (addend_in
) == INTEGER_CST
? sizetype
1336 : TREE_TYPE (addend_in
));
1339 c
= alloc_cand_and_find_basis (CAND_ADD
, gs
, base
, index
, stride
,
1340 ctype
, stype
, savings
);
1344 /* Create a candidate entry for a statement GS, where GS adds SSA
1345 name BASE_IN to constant INDEX_IN. Propagate any known information
1346 about BASE_IN into the new candidate. Return the new candidate. */
1349 create_add_imm_cand (gimple
*gs
, tree base_in
, const widest_int
&index_in
,
1352 enum cand_kind kind
= CAND_ADD
;
1353 tree base
= NULL_TREE
, stride
= NULL_TREE
, ctype
= NULL_TREE
;
1354 tree stype
= NULL_TREE
;
1355 widest_int index
, multiple
;
1356 unsigned savings
= 0;
1358 slsr_cand_t base_cand
= base_cand_from_table (base_in
);
1360 while (base_cand
&& !base
&& base_cand
->kind
!= CAND_PHI
)
1362 signop sign
= TYPE_SIGN (TREE_TYPE (base_cand
->stride
));
1364 if (TREE_CODE (base_cand
->stride
) == INTEGER_CST
1365 && wi::multiple_of_p (index_in
, wi::to_widest (base_cand
->stride
),
/* Y = (B + i') * S, S constant, c = kS for some integer k
   X = Y + c
   ============================
   X = (B + (i' + k)) * S

   OR

   Y = B + (i' * S), S constant, c = kS for some integer k
   X = Y + c
   ============================
   X = B + ((i' + k) * S)  */
1377 kind
= base_cand
->kind
;
1378 base
= base_cand
->base_expr
;
1379 index
= base_cand
->index
+ multiple
;
1380 stride
= base_cand
->stride
;
1381 ctype
= base_cand
->cand_type
;
1382 stype
= base_cand
->stride_type
;
1383 if (has_single_use (base_in
))
1384 savings
= (base_cand
->dead_savings
1385 + stmt_cost (base_cand
->cand_stmt
, speed
));
1388 if (base_cand
->next_interp
)
1389 base_cand
= lookup_cand (base_cand
->next_interp
);
1396 /* No interpretations had anything useful to propagate, so
1397 produce X = Y + (c * 1). */
1401 stride
= integer_one_node
;
1402 ctype
= TREE_TYPE (base_in
);
1406 c
= alloc_cand_and_find_basis (kind
, gs
, base
, index
, stride
,
1407 ctype
, stype
, savings
);
1411 /* Given GS which is an add or subtract of scalar integers or pointers,
1412 make at least one appropriate entry in the candidate table. */
1415 slsr_process_add (gimple
*gs
, tree rhs1
, tree rhs2
, bool speed
)
1417 bool subtract_p
= gimple_assign_rhs_code (gs
) == MINUS_EXPR
;
1418 slsr_cand_t c
= NULL
, c2
;
1420 if (TREE_CODE (rhs2
) == SSA_NAME
)
1422 /* First record an interpretation assuming RHS1 is the base expression
1423 and RHS2 is the stride. But it doesn't make sense for the
1424 stride to be a pointer, so don't record a candidate in that case. */
1425 if (!POINTER_TYPE_P (TREE_TYPE (rhs2
)))
1427 c
= create_add_ssa_cand (gs
, rhs1
, rhs2
, subtract_p
, speed
);
/* Add the first interpretation to the statement-candidate mapping.  */
1431 add_cand_for_stmt (gs
, c
);
/* If the two RHS operands are identical, or this is a subtract,
   we're done.  */
1436 if (operand_equal_p (rhs1
, rhs2
, 0) || subtract_p
)
1439 /* Otherwise, record another interpretation assuming RHS2 is the
1440 base expression and RHS1 is the stride, again provided that the
1441 stride is not a pointer. */
1442 if (!POINTER_TYPE_P (TREE_TYPE (rhs1
)))
1444 c2
= create_add_ssa_cand (gs
, rhs2
, rhs1
, false, speed
);
1446 c
->next_interp
= c2
->cand_num
;
1448 add_cand_for_stmt (gs
, c2
);
1453 /* Record an interpretation for the add-immediate. */
1454 widest_int index
= wi::to_widest (rhs2
);
1458 c
= create_add_imm_cand (gs
, rhs1
, index
, speed
);
1460 /* Add the interpretation to the statement-candidate mapping. */
1461 add_cand_for_stmt (gs
, c
);
1465 /* Given GS which is a negate of a scalar integer, make an appropriate
entry in the candidate table.  A negate is equivalent to a multiply
by -1.  */
1470 slsr_process_neg (gimple
*gs
, tree rhs1
, bool speed
)
1472 /* Record a CAND_MULT interpretation for the multiply by -1. */
1473 slsr_cand_t c
= create_mul_imm_cand (gs
, rhs1
, integer_minus_one_node
, speed
);
1475 /* Add the interpretation to the statement-candidate mapping. */
1476 add_cand_for_stmt (gs
, c
);
1479 /* Help function for legal_cast_p, operating on two trees. Checks
1480 whether it's allowable to cast from RHS to LHS. See legal_cast_p
1481 for more details. */
legal_cast_p_1 (tree lhs_type, tree rhs_type)
  unsigned lhs_size, rhs_size;
  bool lhs_wraps, rhs_wraps;

  lhs_size = TYPE_PRECISION (lhs_type);
  rhs_size = TYPE_PRECISION (rhs_type);
  lhs_wraps = ANY_INTEGRAL_TYPE_P (lhs_type) && TYPE_OVERFLOW_WRAPS (lhs_type);
  rhs_wraps = ANY_INTEGRAL_TYPE_P (rhs_type) && TYPE_OVERFLOW_WRAPS (rhs_type);

  if (lhs_size < rhs_size
      || (rhs_wraps && !lhs_wraps)
      || (rhs_wraps && lhs_wraps && rhs_size != lhs_size))
1502 /* Return TRUE if GS is a statement that defines an SSA name from
1503 a conversion and is legal for us to combine with an add and multiply
in the candidate table.  For example, suppose we have:

  A = B + i;  C = (type) A;  D = C * S;

Without the type-cast, we would create a CAND_MULT for D with base B,
index i, and stride S.  We want to record this candidate only if it
is equivalent to apply the type cast following the multiply:

  A = B + i;  E = A * S;  D = (type) E;

We will record the type with the candidate for D.  This allows us
to use a similar previous candidate as a basis.  If we have earlier seen

  A' = B + i';  C' = (type) A';  D' = C' * S;
1525 we can replace D with
1527 D = D' + (i - i') * S;
1529 But if moving the type-cast would change semantics, we mustn't do this.
1531 This is legitimate for casts from a non-wrapping integral type to
1532 any integral type of the same or larger size. It is not legitimate
1533 to convert a wrapping type to a non-wrapping type, or to a wrapping
1534 type of a different size. I.e., with a wrapping type, we must
1535 assume that the addition B + i could wrap, in which case performing
1536 the multiply before or after one of the "illegal" type casts will
1537 have different semantics. */
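/* For illustration only (assuming a typical LP64 target and no
   -fwrapv): a cast from int to long is legal here, since signed int
   does not wrap and long is at least as wide; a cast from unsigned int
   to unsigned long is not, since the narrower addition may already
   have wrapped; and a cast from unsigned int to int is likewise
   rejected, because a wrapping type may not be cast to a non-wrapping
   one.  */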
legal_cast_p (gimple *gs, tree rhs)
  if (!is_gimple_assign (gs)
      || !CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (gs)))

  return legal_cast_p_1 (TREE_TYPE (gimple_assign_lhs (gs)), TREE_TYPE (rhs));
1549 /* Given GS which is a cast to a scalar integer type, determine whether
1550 the cast is legal for strength reduction. If so, make at least one
1551 appropriate entry in the candidate table. */
1554 slsr_process_cast (gimple
*gs
, tree rhs1
, bool speed
)
1557 slsr_cand_t base_cand
, c
= NULL
, c2
;
1558 unsigned savings
= 0;
1560 if (!legal_cast_p (gs
, rhs1
))
1563 lhs
= gimple_assign_lhs (gs
);
1564 base_cand
= base_cand_from_table (rhs1
);
1565 ctype
= TREE_TYPE (lhs
);
1567 if (base_cand
&& base_cand
->kind
!= CAND_PHI
)
1571 /* Propagate all data from the base candidate except the type,
1572 which comes from the cast, and the base candidate's cast,
1573 which is no longer applicable. */
1574 if (has_single_use (rhs1
))
1575 savings
= (base_cand
->dead_savings
1576 + stmt_cost (base_cand
->cand_stmt
, speed
));
1578 c
= alloc_cand_and_find_basis (base_cand
->kind
, gs
,
1579 base_cand
->base_expr
,
1580 base_cand
->index
, base_cand
->stride
,
1581 ctype
, base_cand
->stride_type
,
1583 if (base_cand
->next_interp
)
1584 base_cand
= lookup_cand (base_cand
->next_interp
);
/* If nothing is known about the RHS, create fresh CAND_ADD and
   CAND_MULT interpretations:

     X = Y + (0 * 1)
     X = (Y + 0) * 1

   The first of these is somewhat arbitrary, but the choice of
   1 for the stride simplifies the logic for propagating casts
   into their uses.  */
1600 c
= alloc_cand_and_find_basis (CAND_ADD
, gs
, rhs1
, 0,
1601 integer_one_node
, ctype
, sizetype
, 0);
1602 c2
= alloc_cand_and_find_basis (CAND_MULT
, gs
, rhs1
, 0,
1603 integer_one_node
, ctype
, sizetype
, 0);
1604 c
->next_interp
= c2
->cand_num
;
/* Add the first (or only) interpretation to the statement-candidate
   mapping.  */
1609 add_cand_for_stmt (gs
, c
);
1612 /* Given GS which is a copy of a scalar integer type, make at least one
1613 appropriate entry in the candidate table.
1615 This interface is included for completeness, but is unnecessary
1616 if this pass immediately follows a pass that performs copy
1617 propagation, such as DOM. */
1620 slsr_process_copy (gimple
*gs
, tree rhs1
, bool speed
)
1622 slsr_cand_t base_cand
, c
= NULL
, c2
;
1623 unsigned savings
= 0;
1625 base_cand
= base_cand_from_table (rhs1
);
1627 if (base_cand
&& base_cand
->kind
!= CAND_PHI
)
1631 /* Propagate all data from the base candidate. */
1632 if (has_single_use (rhs1
))
1633 savings
= (base_cand
->dead_savings
1634 + stmt_cost (base_cand
->cand_stmt
, speed
));
1636 c
= alloc_cand_and_find_basis (base_cand
->kind
, gs
,
1637 base_cand
->base_expr
,
1638 base_cand
->index
, base_cand
->stride
,
1639 base_cand
->cand_type
,
1640 base_cand
->stride_type
, savings
);
1641 if (base_cand
->next_interp
)
1642 base_cand
= lookup_cand (base_cand
->next_interp
);
/* If nothing is known about the RHS, create fresh CAND_ADD and
   CAND_MULT interpretations:

     X = Y + (0 * 1)
     X = (Y + 0) * 1

   The first of these is somewhat arbitrary, but the choice of
   1 for the stride simplifies the logic for propagating casts
   into their uses.  */
1658 c
= alloc_cand_and_find_basis (CAND_ADD
, gs
, rhs1
, 0,
1659 integer_one_node
, TREE_TYPE (rhs1
),
1661 c2
= alloc_cand_and_find_basis (CAND_MULT
, gs
, rhs1
, 0,
1662 integer_one_node
, TREE_TYPE (rhs1
),
1664 c
->next_interp
= c2
->cand_num
;
/* Add the first (or only) interpretation to the statement-candidate
   mapping.  */
1669 add_cand_for_stmt (gs
, c
);
1672 class find_candidates_dom_walker
: public dom_walker
1675 find_candidates_dom_walker (cdi_direction direction
)
1676 : dom_walker (direction
) {}
1677 virtual edge
before_dom_children (basic_block
);
1680 /* Find strength-reduction candidates in block BB. */
1683 find_candidates_dom_walker::before_dom_children (basic_block bb
)
1685 bool speed
= optimize_bb_for_speed_p (bb
);
1687 for (gphi_iterator gsi
= gsi_start_phis (bb
); !gsi_end_p (gsi
);
1689 slsr_process_phi (gsi
.phi (), speed
);
1691 for (gimple_stmt_iterator gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
);
1694 gimple
*gs
= gsi_stmt (gsi
);
1696 if (gimple_vuse (gs
) && gimple_assign_single_p (gs
))
1697 slsr_process_ref (gs
);
1699 else if (is_gimple_assign (gs
)
1700 && SCALAR_INT_MODE_P
1701 (TYPE_MODE (TREE_TYPE (gimple_assign_lhs (gs
)))))
1703 tree rhs1
= NULL_TREE
, rhs2
= NULL_TREE
;
1705 switch (gimple_assign_rhs_code (gs
))
1709 rhs1
= gimple_assign_rhs1 (gs
);
1710 rhs2
= gimple_assign_rhs2 (gs
);
1711 /* Should never happen, but currently some buggy situations
1712 in earlier phases put constants in rhs1. */
1713 if (TREE_CODE (rhs1
) != SSA_NAME
)
1717 /* Possible future opportunity: rhs1 of a ptr+ can be
1719 case POINTER_PLUS_EXPR
:
1721 rhs2
= gimple_assign_rhs2 (gs
);
1727 rhs1
= gimple_assign_rhs1 (gs
);
1728 if (TREE_CODE (rhs1
) != SSA_NAME
)
1736 switch (gimple_assign_rhs_code (gs
))
1739 slsr_process_mul (gs
, rhs1
, rhs2
, speed
);
1743 case POINTER_PLUS_EXPR
:
1745 slsr_process_add (gs
, rhs1
, rhs2
, speed
);
1749 slsr_process_neg (gs
, rhs1
, speed
);
1753 slsr_process_cast (gs
, rhs1
, speed
);
1757 slsr_process_copy (gs
, rhs1
, speed
);
1768 /* Dump a candidate for debug. */
1771 dump_candidate (slsr_cand_t c
)
1773 fprintf (dump_file
, "%3d [%d] ", c
->cand_num
,
1774 gimple_bb (c
->cand_stmt
)->index
);
1775 print_gimple_stmt (dump_file
, c
->cand_stmt
, 0, 0);
1779 fputs (" MULT : (", dump_file
);
1780 print_generic_expr (dump_file
, c
->base_expr
, 0);
1781 fputs (" + ", dump_file
);
1782 print_decs (c
->index
, dump_file
);
1783 fputs (") * ", dump_file
);
1784 if (TREE_CODE (c
->stride
) != INTEGER_CST
1785 && c
->stride_type
!= TREE_TYPE (c
->stride
))
1787 fputs ("(", dump_file
);
1788 print_generic_expr (dump_file
, c
->stride_type
, 0);
1789 fputs (")", dump_file
);
1791 print_generic_expr (dump_file
, c
->stride
, 0);
1792 fputs (" : ", dump_file
);
1795 fputs (" ADD : ", dump_file
);
1796 print_generic_expr (dump_file
, c
->base_expr
, 0);
1797 fputs (" + (", dump_file
);
1798 print_decs (c
->index
, dump_file
);
1799 fputs (" * ", dump_file
);
1800 if (TREE_CODE (c
->stride
) != INTEGER_CST
1801 && c
->stride_type
!= TREE_TYPE (c
->stride
))
1803 fputs ("(", dump_file
);
1804 print_generic_expr (dump_file
, c
->stride_type
, 0);
1805 fputs (")", dump_file
);
1807 print_generic_expr (dump_file
, c
->stride
, 0);
1808 fputs (") : ", dump_file
);
1811 fputs (" REF : ", dump_file
);
1812 print_generic_expr (dump_file
, c
->base_expr
, 0);
1813 fputs (" + (", dump_file
);
1814 print_generic_expr (dump_file
, c
->stride
, 0);
1815 fputs (") + ", dump_file
);
1816 print_decs (c
->index
, dump_file
);
1817 fputs (" : ", dump_file
);
1820 fputs (" PHI : ", dump_file
);
1821 print_generic_expr (dump_file
, c
->base_expr
, 0);
1822 fputs (" + (unknown * ", dump_file
);
1823 print_generic_expr (dump_file
, c
->stride
, 0);
1824 fputs (") : ", dump_file
);
1829 print_generic_expr (dump_file
, c
->cand_type
, 0);
1830 fprintf (dump_file
, "\n basis: %d dependent: %d sibling: %d\n",
1831 c
->basis
, c
->dependent
, c
->sibling
);
1832 fprintf (dump_file
, " next-interp: %d dead-savings: %d\n",
1833 c
->next_interp
, c
->dead_savings
);
1835 fprintf (dump_file
, " phi: %d\n", c
->def_phi
);
1836 fputs ("\n", dump_file
);
1839 /* Dump the candidate vector for debug. */
1842 dump_cand_vec (void)
1847 fprintf (dump_file
, "\nStrength reduction candidate vector:\n\n");
1849 FOR_EACH_VEC_ELT (cand_vec
, i
, c
)
1853 /* Callback used to dump the candidate chains hash table. */
1856 ssa_base_cand_dump_callback (cand_chain
**slot
, void *ignored ATTRIBUTE_UNUSED
)
1858 const_cand_chain_t chain
= *slot
;
1861 print_generic_expr (dump_file
, chain
->base_expr
, 0);
1862 fprintf (dump_file
, " -> %d", chain
->cand
->cand_num
);
1864 for (p
= chain
->next
; p
; p
= p
->next
)
1865 fprintf (dump_file
, " -> %d", p
->cand
->cand_num
);
1867 fputs ("\n", dump_file
);
1871 /* Dump the candidate chains. */
1874 dump_cand_chains (void)
1876 fprintf (dump_file
, "\nStrength reduction candidate chains:\n\n");
1877 base_cand_map
->traverse_noresize
<void *, ssa_base_cand_dump_callback
>
1879 fputs ("\n", dump_file
);
1882 /* Dump the increment vector for debug. */
1885 dump_incr_vec (void)
1887 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1891 fprintf (dump_file
, "\nIncrement vector:\n\n");
1893 for (i
= 0; i
< incr_vec_len
; i
++)
1895 fprintf (dump_file
, "%3d increment: ", i
);
1896 print_decs (incr_vec
[i
].incr
, dump_file
);
1897 fprintf (dump_file
, "\n count: %d", incr_vec
[i
].count
);
1898 fprintf (dump_file
, "\n cost: %d", incr_vec
[i
].cost
);
1899 fputs ("\n initializer: ", dump_file
);
1900 print_generic_expr (dump_file
, incr_vec
[i
].initializer
, 0);
1901 fputs ("\n\n", dump_file
);
/* Replace *EXPR in candidate C with an equivalent strength-reduced
   data reference.  */
1910 replace_ref (tree
*expr
, slsr_cand_t c
)
1912 tree add_expr
, mem_ref
, acc_type
= TREE_TYPE (*expr
);
1913 unsigned HOST_WIDE_INT misalign
;
1916 /* Ensure the memory reference carries the minimum alignment
1917 requirement for the data type. See PR58041. */
1918 get_object_alignment_1 (*expr
, &align
, &misalign
);
1920 align
= least_bit_hwi (misalign
);
1921 if (align
< TYPE_ALIGN (acc_type
))
1922 acc_type
= build_aligned_type (acc_type
, align
);
1924 add_expr
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (c
->base_expr
),
1925 c
->base_expr
, c
->stride
);
1926 mem_ref
= fold_build2 (MEM_REF
, acc_type
, add_expr
,
1927 wide_int_to_tree (c
->cand_type
, c
->index
));
1929 /* Gimplify the base addressing expression for the new MEM_REF tree. */
1930 gimple_stmt_iterator gsi
= gsi_for_stmt (c
->cand_stmt
);
1931 TREE_OPERAND (mem_ref
, 0)
1932 = force_gimple_operand_gsi (&gsi
, TREE_OPERAND (mem_ref
, 0),
1933 /*simple_p=*/true, NULL
,
1934 /*before=*/true, GSI_SAME_STMT
);
1935 copy_ref_info (mem_ref
, *expr
);
1937 update_stmt (c
->cand_stmt
);
1940 /* Replace CAND_REF candidate C, each sibling of candidate C, and each
dependent of candidate C with an equivalent strength-reduced data
reference.  */
1945 replace_refs (slsr_cand_t c
)
1947 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1949 fputs ("Replacing reference: ", dump_file
);
1950 print_gimple_stmt (dump_file
, c
->cand_stmt
, 0, 0);
1953 if (gimple_vdef (c
->cand_stmt
))
1955 tree
*lhs
= gimple_assign_lhs_ptr (c
->cand_stmt
);
1956 replace_ref (lhs
, c
);
1960 tree
*rhs
= gimple_assign_rhs1_ptr (c
->cand_stmt
);
1961 replace_ref (rhs
, c
);
1964 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1966 fputs ("With: ", dump_file
);
1967 print_gimple_stmt (dump_file
, c
->cand_stmt
, 0, 0);
1968 fputs ("\n", dump_file
);
1972 replace_refs (lookup_cand (c
->sibling
));
1975 replace_refs (lookup_cand (c
->dependent
));
1978 /* Return TRUE if candidate C is dependent upon a PHI. */
1981 phi_dependent_cand_p (slsr_cand_t c
)
1983 /* A candidate is not necessarily dependent upon a PHI just because
1984 it has a phi definition for its base name. It may have a basis
1985 that relies upon the same phi definition, in which case the PHI
1986 is irrelevant to this candidate. */
1989 && lookup_cand (c
->basis
)->def_phi
!= c
->def_phi
);
/* Calculate the increment required for candidate C relative to
   its basis.  */
1996 cand_increment (slsr_cand_t c
)
2000 /* If the candidate doesn't have a basis, just return its own
2001 index. This is useful in record_increments to help us find
2002 an existing initializer. Also, if the candidate's basis is
2003 hidden by a phi, then its own index will be the increment
2004 from the newly introduced phi basis. */
2005 if (!c
->basis
|| phi_dependent_cand_p (c
))
2008 basis
= lookup_cand (c
->basis
);
2009 gcc_assert (operand_equal_p (c
->base_expr
, basis
->base_expr
, 0));
2010 return c
->index
- basis
->index
;
2013 /* Calculate the increment required for candidate C relative to
2014 its basis. If we aren't going to generate pointer arithmetic
for this candidate, return the absolute value of that increment
instead.  */
static inline widest_int
cand_abs_increment (slsr_cand_t c)
  widest_int increment = cand_increment (c);

  if (!address_arithmetic_p && wi::neg_p (increment))
    increment = -increment;
2029 /* Return TRUE iff candidate C has already been replaced under
2030 another interpretation. */
cand_already_replaced (slsr_cand_t c)
  return (gimple_bb (c->cand_stmt) == 0);
/* Common logic used by replace_unconditional_candidate and
   replace_conditional_candidate.  */

static void
replace_mult_candidate (slsr_cand_t c, tree basis_name, widest_int bump)
{
  tree target_type = TREE_TYPE (gimple_assign_lhs (c->cand_stmt));
  enum tree_code cand_code = gimple_assign_rhs_code (c->cand_stmt);

  /* It is highly unlikely, but possible, that the resulting
     bump doesn't fit in a HWI.  Abandon the replacement
     in this case.  This does not affect siblings or dependents
     of C.  Restriction to signed HWI is conservative for unsigned
     types but allows for safe negation without twisted logic.  */
  if (wi::fits_shwi_p (bump)
      && bump.to_shwi () != HOST_WIDE_INT_MIN
      /* It is not useful to replace casts, copies, or adds of
	 an SSA name and a constant.  */
      && cand_code != SSA_NAME
      && !CONVERT_EXPR_CODE_P (cand_code)
      && cand_code != PLUS_EXPR
      && cand_code != POINTER_PLUS_EXPR
      && cand_code != MINUS_EXPR)
    {
      enum tree_code code = PLUS_EXPR;
      tree bump_tree;
      gimple *stmt_to_print = NULL;

      /* If the basis name and the candidate's LHS have incompatible
	 types, introduce a cast.  */
      if (!useless_type_conversion_p (target_type, TREE_TYPE (basis_name)))
	basis_name = introduce_cast_before_cand (c, target_type, basis_name);
      if (wi::neg_p (bump))
	{
	  code = MINUS_EXPR;
	  bump = -bump;
	}

      bump_tree = wide_int_to_tree (target_type, bump);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fputs ("Replacing: ", dump_file);
	  print_gimple_stmt (dump_file, c->cand_stmt, 0, 0);
	}

      if (bump == 0)
	{
	  tree lhs = gimple_assign_lhs (c->cand_stmt);
	  gassign *copy_stmt = gimple_build_assign (lhs, basis_name);
	  gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
	  gimple_set_location (copy_stmt, gimple_location (c->cand_stmt));
	  gsi_replace (&gsi, copy_stmt, false);
	  c->cand_stmt = copy_stmt;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    stmt_to_print = copy_stmt;
	}
      else
	{
	  tree rhs1, rhs2;
	  if (cand_code != NEGATE_EXPR) {
	    rhs1 = gimple_assign_rhs1 (c->cand_stmt);
	    rhs2 = gimple_assign_rhs2 (c->cand_stmt);
	  }
	  if (cand_code != NEGATE_EXPR
	      && ((operand_equal_p (rhs1, basis_name, 0)
		   && operand_equal_p (rhs2, bump_tree, 0))
		  || (operand_equal_p (rhs1, bump_tree, 0)
		      && operand_equal_p (rhs2, basis_name, 0))))
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fputs ("(duplicate, not actually replacing)", dump_file);
		  stmt_to_print = c->cand_stmt;
		}
	    }
	  else
	    {
	      gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
	      gimple_assign_set_rhs_with_ops (&gsi, code,
					      basis_name, bump_tree);
	      update_stmt (gsi_stmt (gsi));
	      c->cand_stmt = gsi_stmt (gsi);
	      if (dump_file && (dump_flags & TDF_DETAILS))
		stmt_to_print = gsi_stmt (gsi);
	    }
	}

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fputs ("With: ", dump_file);
	  print_gimple_stmt (dump_file, stmt_to_print, 0, 0);
	  fputs ("\n", dump_file);
	}
    }
}

/* Replace candidate C with an add or subtract.   Note that we only
   operate on CAND_MULTs with known strides, so we will never generate
   a POINTER_PLUS_EXPR.  Each candidate X = (B + i) * S is replaced by
   X = Y + ((i - i') * S), as described in the module commentary.  The
   folded value ((i - i') * S) is referred to here as the "bump."  */

static void
replace_unconditional_candidate (slsr_cand_t c)
{
  slsr_cand_t basis;

  if (cand_already_replaced (c))
    return;

  basis = lookup_cand (c->basis);
  widest_int bump = cand_increment (c) * wi::to_widest (c->stride);

  replace_mult_candidate (c, gimple_assign_lhs (basis->cand_stmt), bump);
}

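/* Continuing the illustrative example above: with a constant stride
   S = 4, the bump is (7 - 2) * 4 = 20, so X = (B + 7) * 4 is rewritten
   as X = Y + 20, replacing a multiply with a single add.  */
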
/* Return the index in the increment vector of the given INCREMENT,
   or -1 if not found.  The latter can occur if more than
   MAX_INCR_VEC_LEN increments have been found.  */

static int
incr_vec_index (const widest_int &increment)
{
  unsigned i;

  for (i = 0; i < incr_vec_len && increment != incr_vec[i].incr; i++)
    ;

  if (i < incr_vec_len)
    return i;
  else
    return -1;
}

/* Create a new statement along edge E to add BASIS_NAME to the product
   of INCREMENT and the stride of candidate C.  Create and return a new
   SSA name to be used as the LHS of the new statement.
   KNOWN_STRIDE is true iff C's stride is a constant.  */

static tree
create_add_on_incoming_edge (slsr_cand_t c, tree basis_name,
			     widest_int increment, edge e, location_t loc,
			     bool known_stride)
{
  basic_block insert_bb;
  gimple_stmt_iterator gsi;
  tree lhs, basis_type;
  gassign *new_stmt, *cast_stmt = NULL;

  /* If the add candidate along this incoming edge has the same
     index as C's hidden basis, the hidden basis represents this
     edge correctly.  */
  if (increment == 0)
    return basis_name;

  basis_type = TREE_TYPE (basis_name);
  lhs = make_temp_ssa_name (basis_type, NULL, "slsr");

  /* Occasionally people convert integers to pointers without a
     cast, leading us into trouble if we aren't careful.  */
  enum tree_code plus_code
    = POINTER_TYPE_P (basis_type) ? POINTER_PLUS_EXPR : PLUS_EXPR;

  if (known_stride)
    {
      tree bump_tree;
      enum tree_code code = plus_code;
      widest_int bump = increment * wi::to_widest (c->stride);
      if (wi::neg_p (bump) && !POINTER_TYPE_P (basis_type))
	{
	  code = MINUS_EXPR;
	  bump = -bump;
	}

      tree stride_type = POINTER_TYPE_P (basis_type) ? sizetype : basis_type;
      bump_tree = wide_int_to_tree (stride_type, bump);
      new_stmt = gimple_build_assign (lhs, code, basis_name, bump_tree);
    }
  else
    {
      int i;
      bool negate_incr = !POINTER_TYPE_P (basis_type) && wi::neg_p (increment);
      i = incr_vec_index (negate_incr ? -increment : increment);
      gcc_assert (i >= 0);

      if (incr_vec[i].initializer)
	{
	  enum tree_code code = negate_incr ? MINUS_EXPR : plus_code;
	  new_stmt = gimple_build_assign (lhs, code, basis_name,
					  incr_vec[i].initializer);
	}
      else
	{
	  tree stride;

	  if (!types_compatible_p (TREE_TYPE (c->stride), c->stride_type))
	    {
	      tree cast_stride = make_temp_ssa_name (c->stride_type, NULL,
						     "slsr");
	      cast_stmt = gimple_build_assign (cast_stride, NOP_EXPR,
					       c->stride);
	      stride = cast_stride;
	    }
	  else
	    stride = c->stride;

	  if (increment == 1)
	    new_stmt = gimple_build_assign (lhs, plus_code, basis_name, stride);
	  else if (increment == -1)
	    new_stmt = gimple_build_assign (lhs, MINUS_EXPR, basis_name,
					    stride);
	  else
	    gcc_unreachable ();
	}
    }

  insert_bb = single_succ_p (e->src) ? e->src : split_edge (e);
  gsi = gsi_last_bb (insert_bb);

  if (!gsi_end_p (gsi) && is_ctrl_stmt (gsi_stmt (gsi)))
    {
      gsi_insert_before (&gsi, new_stmt, GSI_SAME_STMT);
      if (cast_stmt)
	{
	  gsi_insert_before (&gsi, cast_stmt, GSI_SAME_STMT);
	  gimple_set_location (cast_stmt, loc);
	}
    }
  else
    {
      if (cast_stmt)
	{
	  gsi_insert_after (&gsi, cast_stmt, GSI_NEW_STMT);
	  gimple_set_location (cast_stmt, loc);
	}
      gsi_insert_after (&gsi, new_stmt, GSI_NEW_STMT);
    }

  gimple_set_location (new_stmt, loc);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      if (cast_stmt)
	{
	  fprintf (dump_file, "Inserting cast in block %d: ",
		   insert_bb->index);
	  print_gimple_stmt (dump_file, cast_stmt, 0, 0);
	}
      fprintf (dump_file, "Inserting in block %d: ", insert_bb->index);
      print_gimple_stmt (dump_file, new_stmt, 0, 0);
    }

  return lhs;
}

/* Given a candidate C with BASIS_NAME being the LHS of C's basis which
   is hidden by the phi node FROM_PHI, create a new phi node in the same
   block as FROM_PHI.  The new phi is suitable for use as a basis by C,
   with its phi arguments representing conditional adjustments to the
   hidden basis along conditional incoming paths.  Those adjustments are
   made by creating add statements (and sometimes recursively creating
   phis) along those incoming paths.  LOC is the location to attach to
   the introduced statements.  KNOWN_STRIDE is true iff C's stride is a
   constant.  */

static tree
create_phi_basis (slsr_cand_t c, gimple *from_phi, tree basis_name,
		  location_t loc, bool known_stride)
{
  int i;
  tree name, phi_arg;
  gphi *phi;
  slsr_cand_t basis = lookup_cand (c->basis);
  int nargs = gimple_phi_num_args (from_phi);
  basic_block phi_bb = gimple_bb (from_phi);
  slsr_cand_t phi_cand = *stmt_cand_map->get (from_phi);
  auto_vec <tree> phi_args (nargs);

  /* Process each argument of the existing phi that represents
     conditionally-executed add candidates.  */
  for (i = 0; i < nargs; i++)
    {
      edge e = (*phi_bb->preds)[i];
      tree arg = gimple_phi_arg_def (from_phi, i);
      tree feeding_def;

      /* If the phi argument is the base name of the CAND_PHI, then
	 this incoming arc should use the hidden basis.  */
      if (operand_equal_p (arg, phi_cand->base_expr, 0))
	if (basis->index == 0)
	  feeding_def = gimple_assign_lhs (basis->cand_stmt);
	else
	  {
	    widest_int incr = -basis->index;
	    feeding_def = create_add_on_incoming_edge (c, basis_name, incr,
						       e, loc, known_stride);
	  }
      else
	{
	  gimple *arg_def = SSA_NAME_DEF_STMT (arg);

	  /* If there is another phi along this incoming edge, we must
	     process it in the same fashion to ensure that all basis
	     adjustments are made along its incoming edges.  */
	  if (gimple_code (arg_def) == GIMPLE_PHI)
	    feeding_def = create_phi_basis (c, arg_def, basis_name,
					    loc, known_stride);
	  else
	    {
	      slsr_cand_t arg_cand = base_cand_from_table (arg);
	      widest_int diff = arg_cand->index - basis->index;
	      feeding_def = create_add_on_incoming_edge (c, basis_name, diff,
							 e, loc, known_stride);
	    }
	}

      /* Because of recursion, we need to save the arguments in a vector
	 so we can create the PHI statement all at once.  Otherwise the
	 storage for the half-created PHI can be reclaimed.  */
      phi_args.safe_push (feeding_def);
    }

  /* Create the new phi basis.  */
  name = make_temp_ssa_name (TREE_TYPE (basis_name), NULL, "slsr");
  phi = create_phi_node (name, phi_bb);
  SSA_NAME_DEF_STMT (name) = phi;

  FOR_EACH_VEC_ELT (phi_args, i, phi_arg)
    {
      edge e = (*phi_bb->preds)[i];
      add_phi_arg (phi, phi_arg, e, loc);
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fputs ("Introducing new phi basis: ", dump_file);
      print_gimple_stmt (dump_file, phi, 0, 0);
    }

  return name;
}

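/* As an illustration: if the hidden basis reaches C through a statement
   T = PHI <T1, T2>, the phi built here has one argument per incoming
   edge, each argument being either the basis name itself (when that
   edge's adjustment is zero) or a newly inserted add of the basis name
   and that edge's increment times the stride.  */
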
/* Given a candidate C whose basis is hidden by at least one intervening
   phi, introduce a matching number of new phis to represent its basis
   adjusted by conditional increments along possible incoming paths.  Then
   replace C as though it were an unconditional candidate, using the new
   basis.  */

static void
replace_conditional_candidate (slsr_cand_t c)
{
  tree basis_name, name;
  slsr_cand_t basis;
  location_t loc;

  /* Look up the LHS SSA name from C's basis.  This will be the
     RHS1 of the adds we will introduce to create new phi arguments.  */
  basis = lookup_cand (c->basis);
  basis_name = gimple_assign_lhs (basis->cand_stmt);

  /* Create a new phi statement which will represent C's true basis
     after the transformation is complete.  */
  loc = gimple_location (c->cand_stmt);
  name = create_phi_basis (c, lookup_cand (c->def_phi)->cand_stmt,
			   basis_name, loc, KNOWN_STRIDE);
  /* Replace C with an add of the new basis phi and a constant.  */
  widest_int bump = c->index * wi::to_widest (c->stride);

  replace_mult_candidate (c, name, bump);
}

/* Compute the expected costs of inserting basis adjustments for
   candidate C with phi-definition PHI.  The cost of inserting
   one adjustment is given by ONE_ADD_COST.  If PHI has arguments
   which are themselves phi results, recursively calculate costs
   for those phis as well.  */

static int
phi_add_costs (gimple *phi, slsr_cand_t c, int one_add_cost)
{
  unsigned i;
  int cost = 0;
  slsr_cand_t phi_cand = *stmt_cand_map->get (phi);

  /* If we work our way back to a phi that isn't dominated by the hidden
     basis, this isn't a candidate for replacement.  Indicate this by
     returning an unreasonably high cost.  It's not easy to detect
     these situations when determining the basis, so we defer the
     decision until now.  */
  basic_block phi_bb = gimple_bb (phi);
  slsr_cand_t basis = lookup_cand (c->basis);
  basic_block basis_bb = gimple_bb (basis->cand_stmt);

  if (phi_bb == basis_bb || !dominated_by_p (CDI_DOMINATORS, phi_bb, basis_bb))
    return COST_INFINITE;

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree arg = gimple_phi_arg_def (phi, i);

      if (arg != phi_cand->base_expr)
	{
	  gimple *arg_def = SSA_NAME_DEF_STMT (arg);

	  if (gimple_code (arg_def) == GIMPLE_PHI)
	    cost += phi_add_costs (arg_def, c, one_add_cost);
	  else
	    {
	      slsr_cand_t arg_cand = base_cand_from_table (arg);

	      if (arg_cand->index != c->index)
		cost += one_add_cost;
	    }
	}
    }

  return cost;
}

/* For candidate C, each sibling of candidate C, and each dependent of
   candidate C, determine whether the candidate is dependent upon a
   phi that hides its basis.  If not, replace the candidate unconditionally.
   Otherwise, determine whether the cost of introducing compensation code
   for the candidate is offset by the gains from strength reduction.  If
   so, replace the candidate and introduce the compensation code.  */

static void
replace_uncond_cands_and_profitable_phis (slsr_cand_t c)
{
  if (phi_dependent_cand_p (c))
    {
      if (c->kind == CAND_MULT)
	{
	  /* A candidate dependent upon a phi will replace a multiply by
	     a constant with an add, and will insert at most one add for
	     each phi argument.  Add these costs with the potential dead-code
	     savings to determine profitability.  */
	  bool speed = optimize_bb_for_speed_p (gimple_bb (c->cand_stmt));
	  int mult_savings = stmt_cost (c->cand_stmt, speed);
	  gimple *phi = lookup_cand (c->def_phi)->cand_stmt;
	  tree phi_result = gimple_phi_result (phi);
	  int one_add_cost = add_cost (speed,
				       TYPE_MODE (TREE_TYPE (phi_result)));
	  int add_costs = one_add_cost + phi_add_costs (phi, c, one_add_cost);
	  int cost = add_costs - mult_savings - c->dead_savings;

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "  Conditional candidate %d:\n", c->cand_num);
	      fprintf (dump_file, "    add_costs = %d\n", add_costs);
	      fprintf (dump_file, "    mult_savings = %d\n", mult_savings);
	      fprintf (dump_file, "    dead_savings = %d\n", c->dead_savings);
	      fprintf (dump_file, "    cost = %d\n", cost);
	      if (cost <= COST_NEUTRAL)
		fputs ("  Replacing...\n", dump_file);
	      else
		fputs ("  Not replaced.\n", dump_file);
	    }

	  if (cost <= COST_NEUTRAL)
	    replace_conditional_candidate (c);
	}
    }
  else
    replace_unconditional_candidate (c);

  if (c->sibling)
    replace_uncond_cands_and_profitable_phis (lookup_cand (c->sibling));

  if (c->dependent)
    replace_uncond_cands_and_profitable_phis (lookup_cand (c->dependent));
}

/* Count the number of candidates in the tree rooted at C that have
   not already been replaced under other interpretations.  */

static int
count_candidates (slsr_cand_t c)
{
  unsigned count = cand_already_replaced (c) ? 0 : 1;

  if (c->sibling)
    count += count_candidates (lookup_cand (c->sibling));

  if (c->dependent)
    count += count_candidates (lookup_cand (c->dependent));

  return count;
}

/* Increase the count of INCREMENT by one in the increment vector.
   INCREMENT is associated with candidate C.  If INCREMENT is to be
   conditionally executed as part of a conditional candidate replacement,
   IS_PHI_ADJUST is true, otherwise false.  If an initializer
   T_0 = stride * I is provided by a candidate that dominates all
   candidates with the same increment, also record T_0 for subsequent use.  */

static void
record_increment (slsr_cand_t c, widest_int increment, bool is_phi_adjust)
{
  bool found = false;
  unsigned i;

  /* Treat increments that differ only in sign as identical so as to
     share initializers, unless we are generating pointer arithmetic.  */
  if (!address_arithmetic_p && wi::neg_p (increment))
    increment = -increment;

  for (i = 0; i < incr_vec_len; i++)
    {
      if (incr_vec[i].incr == increment)
	{
	  incr_vec[i].count++;
	  found = true;

	  /* If we previously recorded an initializer that doesn't
	     dominate this candidate, it's not going to be useful to
	     us after all.  */
	  if (incr_vec[i].initializer
	      && !dominated_by_p (CDI_DOMINATORS,
				  gimple_bb (c->cand_stmt),
				  incr_vec[i].init_bb))
	    {
	      incr_vec[i].initializer = NULL_TREE;
	      incr_vec[i].init_bb = NULL;
	    }

	  break;
	}
    }

  if (!found && incr_vec_len < MAX_INCR_VEC_LEN - 1)
    {
      /* The first time we see an increment, create the entry for it.
	 If this is the root candidate which doesn't have a basis, set
	 the count to zero.  We're only processing it so it can possibly
	 provide an initializer for other candidates.  */
      incr_vec[incr_vec_len].incr = increment;
      incr_vec[incr_vec_len].count = c->basis || is_phi_adjust ? 1 : 0;
      incr_vec[incr_vec_len].cost = COST_INFINITE;

      /* Optimistically record the first occurrence of this increment
	 as providing an initializer (if it does); we will revise this
	 opinion later if it doesn't dominate all other occurrences.
	 Exception:  increments of 0, 1 never need initializers;
	 and phi adjustments don't ever provide initializers.  */
      if (c->kind == CAND_ADD
	  && !is_phi_adjust
	  && c->index == increment
	  && (increment > 1 || increment < 0)
	  && (gimple_assign_rhs_code (c->cand_stmt) == PLUS_EXPR
	      || gimple_assign_rhs_code (c->cand_stmt) == POINTER_PLUS_EXPR))
	{
	  tree t0 = NULL_TREE;
	  tree rhs1 = gimple_assign_rhs1 (c->cand_stmt);
	  tree rhs2 = gimple_assign_rhs2 (c->cand_stmt);
	  if (operand_equal_p (rhs1, c->base_expr, 0))
	    t0 = rhs2;
	  else if (operand_equal_p (rhs2, c->base_expr, 0))
	    t0 = rhs1;
	  if (t0
	      && SSA_NAME_DEF_STMT (t0)
	      && gimple_bb (SSA_NAME_DEF_STMT (t0)))
	    {
	      incr_vec[incr_vec_len].initializer = t0;
	      incr_vec[incr_vec_len++].init_bb
		= gimple_bb (SSA_NAME_DEF_STMT (t0));
	    }
	  else
	    {
	      incr_vec[incr_vec_len].initializer = NULL_TREE;
	      incr_vec[incr_vec_len++].init_bb = NULL;
	    }
	}
      else
	{
	  incr_vec[incr_vec_len].initializer = NULL_TREE;
	  incr_vec[incr_vec_len++].init_bb = NULL;
	}
    }
}

/* Given phi statement PHI that hides a candidate from its BASIS, find
   the increments along each incoming arc (recursively handling additional
   phis that may be present) and record them.  These increments are the
   difference in index between the index-adjusting statements and the
   index of the basis.  */

static void
record_phi_increments (slsr_cand_t basis, gimple *phi)
{
  unsigned i;
  slsr_cand_t phi_cand = *stmt_cand_map->get (phi);

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree arg = gimple_phi_arg_def (phi, i);

      if (!operand_equal_p (arg, phi_cand->base_expr, 0))
	{
	  gimple *arg_def = SSA_NAME_DEF_STMT (arg);

	  if (gimple_code (arg_def) == GIMPLE_PHI)
	    record_phi_increments (basis, arg_def);
	  else
	    {
	      slsr_cand_t arg_cand = base_cand_from_table (arg);
	      widest_int diff = arg_cand->index - basis->index;
	      record_increment (arg_cand, diff, PHI_ADJUST);
	    }
	}
    }
}

/* Determine how many times each unique increment occurs in the set
   of candidates rooted at C's parent, recording the data in the
   increment vector.  For each unique increment I, if an initializer
   T_0 = stride * I is provided by a candidate that dominates all
   candidates with the same increment, also record T_0 for subsequent
   use.  */

static void
record_increments (slsr_cand_t c)
{
  if (!cand_already_replaced (c))
    {
      if (!phi_dependent_cand_p (c))
	record_increment (c, cand_increment (c), NOT_PHI_ADJUST);
      else
	{
	  /* A candidate with a basis hidden by a phi will have one
	     increment for its relationship to the index represented by
	     the phi, and potentially additional increments along each
	     incoming edge.  For the root of the dependency tree (which
	     has no basis), process just the initial index in case it has
	     an initializer that can be used by subsequent candidates.  */
	  record_increment (c, c->index, NOT_PHI_ADJUST);

	  if (c->basis)
	    record_phi_increments (lookup_cand (c->basis),
				   lookup_cand (c->def_phi)->cand_stmt);
	}
    }

  if (c->sibling)
    record_increments (lookup_cand (c->sibling));

  if (c->dependent)
    record_increments (lookup_cand (c->dependent));
}

/* Add up and return the costs of introducing add statements that
   require the increment INCR on behalf of candidate C and phi
   statement PHI.  Accumulate into *SAVINGS the potential savings
   from removing existing statements that feed PHI and have no other
   uses.  */

static int
phi_incr_cost (slsr_cand_t c, const widest_int &incr, gimple *phi,
	       int *savings)
{
  unsigned i;
  int cost = 0;
  slsr_cand_t basis = lookup_cand (c->basis);
  slsr_cand_t phi_cand = *stmt_cand_map->get (phi);

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree arg = gimple_phi_arg_def (phi, i);

      if (!operand_equal_p (arg, phi_cand->base_expr, 0))
	{
	  gimple *arg_def = SSA_NAME_DEF_STMT (arg);

	  if (gimple_code (arg_def) == GIMPLE_PHI)
	    {
	      int feeding_savings = 0;
	      cost += phi_incr_cost (c, incr, arg_def, &feeding_savings);
	      if (has_single_use (gimple_phi_result (arg_def)))
		*savings += feeding_savings;
	    }
	  else
	    {
	      slsr_cand_t arg_cand = base_cand_from_table (arg);
	      widest_int diff = arg_cand->index - basis->index;

	      if (incr == diff)
		{
		  tree basis_lhs = gimple_assign_lhs (basis->cand_stmt);
		  tree lhs = gimple_assign_lhs (arg_cand->cand_stmt);
		  cost += add_cost (true, TYPE_MODE (TREE_TYPE (basis_lhs)));
		  if (has_single_use (lhs))
		    *savings += stmt_cost (arg_cand->cand_stmt, true);
		}
	    }
	}
    }

  return cost;
}

/* Return the first candidate in the tree rooted at C that has not
   already been replaced, favoring siblings over dependents.  */

static slsr_cand_t
unreplaced_cand_in_tree (slsr_cand_t c)
{
  if (!cand_already_replaced (c))
    return c;

  if (c->sibling)
    {
      slsr_cand_t sib = unreplaced_cand_in_tree (lookup_cand (c->sibling));
      if (sib)
	return sib;
    }

  if (c->dependent)
    {
      slsr_cand_t dep = unreplaced_cand_in_tree (lookup_cand (c->dependent));
      if (dep)
	return dep;
    }

  return NULL;
}

/* Return TRUE if the candidates in the tree rooted at C should be
   optimized for speed, else FALSE.  We estimate this based on the block
   containing the most dominant candidate in the tree that has not yet
   been replaced.  */

static bool
optimize_cands_for_speed_p (slsr_cand_t c)
{
  slsr_cand_t c2 = unreplaced_cand_in_tree (c);
  gcc_assert (c2);
  return optimize_bb_for_speed_p (gimple_bb (c2->cand_stmt));
}

/* Add COST_IN to the lowest cost of any dependent path starting at
   candidate C or any of its siblings, counting only candidates along
   such paths with increment INCR.  Assume that replacing a candidate
   reduces cost by REPL_SAVINGS.  Also account for savings from any
   statements that would go dead.  If COUNT_PHIS is true, include
   costs of introducing feeding statements for conditional candidates.  */

static int
lowest_cost_path (int cost_in, int repl_savings, slsr_cand_t c,
		  const widest_int &incr, bool count_phis)
{
  int local_cost, sib_cost, savings = 0;
  widest_int cand_incr = cand_abs_increment (c);

  if (cand_already_replaced (c))
    local_cost = cost_in;
  else if (incr == cand_incr)
    local_cost = cost_in - repl_savings - c->dead_savings;
  else
    local_cost = cost_in - c->dead_savings;

  if (count_phis
      && phi_dependent_cand_p (c)
      && !cand_already_replaced (c))
    {
      gimple *phi = lookup_cand (c->def_phi)->cand_stmt;
      local_cost += phi_incr_cost (c, incr, phi, &savings);

      if (has_single_use (gimple_phi_result (phi)))
	local_cost -= savings;
    }

  if (c->dependent)
    local_cost = lowest_cost_path (local_cost, repl_savings,
				   lookup_cand (c->dependent), incr,
				   count_phis);

  if (c->sibling)
    {
      sib_cost = lowest_cost_path (cost_in, repl_savings,
				   lookup_cand (c->sibling), incr,
				   count_phis);
      local_cost = MIN (local_cost, sib_cost);
    }

  return local_cost;
}

/* Compute the total savings that would accrue from all replacements
   in the candidate tree rooted at C, counting only candidates with
   increment INCR.  Assume that replacing a candidate reduces cost
   by REPL_SAVINGS.  Also account for savings from statements that
   would go dead.  */

static int
total_savings (int repl_savings, slsr_cand_t c, const widest_int &incr,
	       bool count_phis)
{
  int savings = 0;
  widest_int cand_incr = cand_abs_increment (c);

  if (incr == cand_incr && !cand_already_replaced (c))
    savings += repl_savings + c->dead_savings;

  if (count_phis
      && phi_dependent_cand_p (c)
      && !cand_already_replaced (c))
    {
      int phi_savings = 0;
      gimple *phi = lookup_cand (c->def_phi)->cand_stmt;
      savings -= phi_incr_cost (c, incr, phi, &phi_savings);

      if (has_single_use (gimple_phi_result (phi)))
	savings += phi_savings;
    }

  if (c->dependent)
    savings += total_savings (repl_savings, lookup_cand (c->dependent), incr,
			      count_phis);

  if (c->sibling)
    savings += total_savings (repl_savings, lookup_cand (c->sibling), incr,
			      count_phis);

  return savings;
}

/* Use target-specific costs to determine and record which increments
   in the current candidate tree are profitable to replace, assuming
   MODE and SPEED.  FIRST_DEP is the first dependent of the root of
   the candidate tree.

   One slight limitation here is that we don't account for the possible
   introduction of casts in some cases.  See replace_one_candidate for
   the cases where these are introduced.  This should probably be cleaned
   up sometime.  */

static void
analyze_increments (slsr_cand_t first_dep, machine_mode mode, bool speed)
{
  unsigned i;

  for (i = 0; i < incr_vec_len; i++)
    {
      HOST_WIDE_INT incr = incr_vec[i].incr.to_shwi ();

      /* If somehow this increment is bigger than a HWI, we won't
	 be optimizing candidates that use it.  And if the increment
	 has a count of zero, nothing will be done with it.  */
      if (!wi::fits_shwi_p (incr_vec[i].incr) || !incr_vec[i].count)
	incr_vec[i].cost = COST_INFINITE;

      /* Increments of 0, 1, and -1 are always profitable to replace,
	 because they always replace a multiply or add with an add or
	 copy, and may cause one or more existing instructions to go
	 dead.  Exception:  -1 can't be assumed to be profitable for
	 pointer addition.  */
      else if (incr == 0
	       || incr == 1
	       || (incr == -1
		   && !POINTER_TYPE_P (first_dep->cand_type)))
	incr_vec[i].cost = COST_NEUTRAL;

      /* If we need to add an initializer, give up if a cast from the
	 candidate's type to its stride's type can lose precision.
	 Note that this already takes into account that the stride may
	 have been cast to a wider type, in which case this test won't
	 fire.  Example:

	   short int _1;
	   _2 = (int) _1;
	   _3 = _2 * 10;
	   _4 = x + _3;    ADD: x + (10 * (int)_1) : int
	   _5 = _2 * 15;
	   _6 = x + _5;    ADD: x + (15 * (int)_1) : int

	 Although the stride was a short int initially, the stride
	 used in the analysis has been widened to an int, and such
	 widening will be done in the initializer as well.  */
      else if (!incr_vec[i].initializer
	       && TREE_CODE (first_dep->stride) != INTEGER_CST
	       && !legal_cast_p_1 (first_dep->stride_type,
				   TREE_TYPE (gimple_assign_lhs
					      (first_dep->cand_stmt))))
	incr_vec[i].cost = COST_INFINITE;

      /* If we need to add an initializer, make sure we don't introduce
	 a multiply by a pointer type, which can happen in certain cast
	 scenarios.  */
      else if (!incr_vec[i].initializer
	       && TREE_CODE (first_dep->stride) != INTEGER_CST
	       && POINTER_TYPE_P (first_dep->stride_type))
	incr_vec[i].cost = COST_INFINITE;

      /* For any other increment, if this is a multiply candidate, we
	 must introduce a temporary T and initialize it with
	 T_0 = stride * increment.  When optimizing for speed, walk the
	 candidate tree to calculate the best cost reduction along any
	 path; if it offsets the fixed cost of inserting the initializer,
	 replacing the increment is profitable.  When optimizing for
	 size, instead calculate the total cost reduction from replacing
	 all candidates with this increment.  */
      else if (first_dep->kind == CAND_MULT)
	{
	  int cost = mult_by_coeff_cost (incr, mode, speed);
	  int repl_savings = mul_cost (speed, mode) - add_cost (speed, mode);
	  if (speed)
	    cost = lowest_cost_path (cost, repl_savings, first_dep,
				     incr_vec[i].incr, COUNT_PHIS);
	  else
	    cost -= total_savings (repl_savings, first_dep, incr_vec[i].incr,
				   COUNT_PHIS);

	  incr_vec[i].cost = cost;
	}

      /* If this is an add candidate, the initializer may already
	 exist, so only calculate the cost of the initializer if it
	 doesn't.  We are replacing one add with another here, so the
	 known replacement savings is zero.  We will account for removal
	 of dead instructions in lowest_cost_path or total_savings.  */
      else
	{
	  int cost = 0;
	  if (!incr_vec[i].initializer)
	    cost = mult_by_coeff_cost (incr, mode, speed);

	  if (speed)
	    cost = lowest_cost_path (cost, 0, first_dep, incr_vec[i].incr,
				     COUNT_PHIS);
	  else
	    cost -= total_savings (0, first_dep, incr_vec[i].incr,
				   COUNT_PHIS);

	  incr_vec[i].cost = cost;
	}
    }
}

/* Return the nearest common dominator of BB1 and BB2.  If the blocks
   are identical, return the earlier of C1 and C2 in *WHERE.  Otherwise,
   if the NCD matches BB1, return C1 in *WHERE; if the NCD matches BB2,
   return C2 in *WHERE; and if the NCD matches neither, return NULL in
   *WHERE.  Note: It is possible for one of C1 and C2 to be NULL.  */

static basic_block
ncd_for_two_cands (basic_block bb1, basic_block bb2,
		   slsr_cand_t c1, slsr_cand_t c2, slsr_cand_t *where)
{
  basic_block ncd;

  if (!bb1)
    {
      *where = c2;
      return bb2;
    }

  if (!bb2)
    {
      *where = c1;
      return bb1;
    }

  ncd = nearest_common_dominator (CDI_DOMINATORS, bb1, bb2);

  /* If both candidates are in the same block, the earlier
     candidate wins.  */
  if (bb1 == ncd && bb2 == ncd)
    {
      if (!c1 || (c2 && c2->cand_num < c1->cand_num))
	*where = c2;
      else
	*where = c1;
    }

  /* Otherwise, if one of them produced a candidate in the
     dominator, that one wins.  */
  else if (bb1 == ncd)
    *where = c1;

  else if (bb2 == ncd)
    *where = c2;

  /* If neither matches the dominator, neither wins.  */
  else
    *where = NULL;

  return ncd;
}

/* Consider all candidates that feed PHI.  Find the nearest common
   dominator of those candidates requiring the given increment INCR.
   Further find and return the nearest common dominator of this result
   with block NCD.  If the returned block contains one or more of the
   candidates, return the earliest candidate in the block in *WHERE.  */

static basic_block
ncd_with_phi (slsr_cand_t c, const widest_int &incr, gphi *phi,
	      basic_block ncd, slsr_cand_t *where)
{
  unsigned i;
  slsr_cand_t basis = lookup_cand (c->basis);
  slsr_cand_t phi_cand = *stmt_cand_map->get (phi);

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree arg = gimple_phi_arg_def (phi, i);

      if (!operand_equal_p (arg, phi_cand->base_expr, 0))
	{
	  gimple *arg_def = SSA_NAME_DEF_STMT (arg);

	  if (gimple_code (arg_def) == GIMPLE_PHI)
	    ncd = ncd_with_phi (c, incr, as_a <gphi *> (arg_def), ncd,
				where);
	  else
	    {
	      slsr_cand_t arg_cand = base_cand_from_table (arg);
	      widest_int diff = arg_cand->index - basis->index;
	      basic_block pred = gimple_phi_arg_edge (phi, i)->src;

	      if ((incr == diff) || (!address_arithmetic_p && incr == -diff))
		ncd = ncd_for_two_cands (ncd, pred, *where, NULL, where);
	    }
	}
    }

  return ncd;
}

/* Consider the candidate C together with any candidates that feed
   C's phi dependence (if any).  Find and return the nearest common
   dominator of those candidates requiring the given increment INCR.
   If the returned block contains one or more of the candidates,
   return the earliest candidate in the block in *WHERE.  */

static basic_block
ncd_of_cand_and_phis (slsr_cand_t c, const widest_int &incr, slsr_cand_t *where)
{
  basic_block ncd = NULL;

  if (cand_abs_increment (c) == incr)
    {
      ncd = gimple_bb (c->cand_stmt);
      *where = c;
    }

  if (phi_dependent_cand_p (c))
    ncd = ncd_with_phi (c, incr,
			as_a <gphi *> (lookup_cand (c->def_phi)->cand_stmt),
			ncd, where);

  return ncd;
}

/* Consider all candidates in the tree rooted at C for which INCR
   represents the required increment of C relative to its basis.
   Find and return the basic block that most nearly dominates all
   such candidates.  If the returned block contains one or more of
   the candidates, return the earliest candidate in the block in
   *WHERE.  */

static basic_block
nearest_common_dominator_for_cands (slsr_cand_t c, const widest_int &incr,
				    slsr_cand_t *where)
{
  basic_block sib_ncd = NULL, dep_ncd = NULL, this_ncd = NULL, ncd;
  slsr_cand_t sib_where = NULL, dep_where = NULL, this_where = NULL, new_where;

  /* First find the NCD of all siblings and dependents.  */
  if (c->sibling)
    sib_ncd = nearest_common_dominator_for_cands (lookup_cand (c->sibling),
						  incr, &sib_where);
  if (c->dependent)
    dep_ncd = nearest_common_dominator_for_cands (lookup_cand (c->dependent),
						  incr, &dep_where);
  if (!sib_ncd && !dep_ncd)
    {
      new_where = NULL;
      ncd = NULL;
    }
  else if (sib_ncd && !dep_ncd)
    {
      new_where = sib_where;
      ncd = sib_ncd;
    }
  else if (dep_ncd && !sib_ncd)
    {
      new_where = dep_where;
      ncd = dep_ncd;
    }
  else
    ncd = ncd_for_two_cands (sib_ncd, dep_ncd, sib_where,
			     dep_where, &new_where);

  /* If the candidate's increment doesn't match the one we're interested
     in (and nor do any increments for feeding defs of a phi-dependence),
     then the result depends only on siblings and dependents.  */
  this_ncd = ncd_of_cand_and_phis (c, incr, &this_where);

  if (!this_ncd || cand_already_replaced (c))
    {
      *where = new_where;
      return ncd;
    }

  /* Otherwise, compare this candidate with the result from all siblings
     and dependents.  */
  ncd = ncd_for_two_cands (ncd, this_ncd, new_where, this_where, where);

  return ncd;
}

/* Return TRUE if the increment indexed by INDEX is profitable to replace.  */

static inline bool
profitable_increment_p (unsigned index)
{
  return (incr_vec[index].cost <= COST_NEUTRAL);
}

/* For each profitable increment in the increment vector not equal to
   0 or 1 (or -1, for non-pointer arithmetic), find the nearest common
   dominator of all statements in the candidate chain rooted at C
   that require that increment, and insert an initializer
   T_0 = stride * increment at that location.  Record T_0 with the
   increment record.  */

static void
insert_initializers (slsr_cand_t c)
{
  unsigned i;

  for (i = 0; i < incr_vec_len; i++)
    {
      basic_block bb;
      slsr_cand_t where = NULL;
      gassign *init_stmt;
      gassign *cast_stmt = NULL;
      tree new_name, incr_tree, init_stride;
      widest_int incr = incr_vec[i].incr;

      if (!profitable_increment_p (i)
	  || incr == 1
	  || (incr == -1
	      && (!POINTER_TYPE_P (lookup_cand (c->basis)->cand_type)))
	  || incr == 0)
	continue;

      /* We may have already identified an existing initializer that
	 will suffice.  */
      if (incr_vec[i].initializer)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fputs ("Using existing initializer: ", dump_file);
	      print_gimple_stmt (dump_file,
				 SSA_NAME_DEF_STMT (incr_vec[i].initializer),
				 0, 0);
	    }
	  continue;
	}

      /* Find the block that most closely dominates all candidates
	 with this increment.  If there is at least one candidate in
	 that block, the earliest one will be returned in WHERE.  */
      bb = nearest_common_dominator_for_cands (c, incr, &where);

      /* If the nominal stride has a different type than the recorded
	 stride type, build a cast from the nominal stride to that type.  */
      if (!types_compatible_p (TREE_TYPE (c->stride), c->stride_type))
	{
	  init_stride = make_temp_ssa_name (c->stride_type, NULL, "slsr");
	  cast_stmt = gimple_build_assign (init_stride, NOP_EXPR, c->stride);
	}
      else
	init_stride = c->stride;

      /* Create a new SSA name to hold the initializer's value.  */
      new_name = make_temp_ssa_name (c->stride_type, NULL, "slsr");
      incr_vec[i].initializer = new_name;

      /* Create the initializer and insert it in the latest possible
	 dominating position.  */
      incr_tree = wide_int_to_tree (c->stride_type, incr);
      init_stmt = gimple_build_assign (new_name, MULT_EXPR,
				       init_stride, incr_tree);
      if (where)
	{
	  gimple_stmt_iterator gsi = gsi_for_stmt (where->cand_stmt);
	  location_t loc = gimple_location (where->cand_stmt);

	  if (cast_stmt)
	    {
	      gsi_insert_before (&gsi, cast_stmt, GSI_SAME_STMT);
	      gimple_set_location (cast_stmt, loc);
	    }

	  gsi_insert_before (&gsi, init_stmt, GSI_SAME_STMT);
	  gimple_set_location (init_stmt, loc);
	}
      else
	{
	  gimple_stmt_iterator gsi = gsi_last_bb (bb);
	  gimple *basis_stmt = lookup_cand (c->basis)->cand_stmt;
	  location_t loc = gimple_location (basis_stmt);

	  if (!gsi_end_p (gsi) && is_ctrl_stmt (gsi_stmt (gsi)))
	    {
	      if (cast_stmt)
		{
		  gsi_insert_before (&gsi, cast_stmt, GSI_SAME_STMT);
		  gimple_set_location (cast_stmt, loc);
		}
	      gsi_insert_before (&gsi, init_stmt, GSI_SAME_STMT);
	    }
	  else
	    {
	      if (cast_stmt)
		{
		  gsi_insert_after (&gsi, cast_stmt, GSI_NEW_STMT);
		  gimple_set_location (cast_stmt, loc);
		}
	      gsi_insert_after (&gsi, init_stmt, GSI_SAME_STMT);
	    }

	  gimple_set_location (init_stmt, gimple_location (basis_stmt));
	}

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  if (cast_stmt)
	    {
	      fputs ("Inserting stride cast: ", dump_file);
	      print_gimple_stmt (dump_file, cast_stmt, 0, 0);
	    }
	  fputs ("Inserting initializer: ", dump_file);
	  print_gimple_stmt (dump_file, init_stmt, 0, 0);
	}
    }
}

/* Return TRUE iff all required increments for candidates feeding PHI
   are profitable to replace on behalf of candidate C.  */

static bool
all_phi_incrs_profitable (slsr_cand_t c, gimple *phi)
{
  unsigned i;
  slsr_cand_t basis = lookup_cand (c->basis);
  slsr_cand_t phi_cand = *stmt_cand_map->get (phi);

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree arg = gimple_phi_arg_def (phi, i);

      if (!operand_equal_p (arg, phi_cand->base_expr, 0))
	{
	  gimple *arg_def = SSA_NAME_DEF_STMT (arg);

	  if (gimple_code (arg_def) == GIMPLE_PHI)
	    {
	      if (!all_phi_incrs_profitable (c, arg_def))
		return false;
	    }
	  else
	    {
	      int j;
	      slsr_cand_t arg_cand = base_cand_from_table (arg);
	      widest_int increment = arg_cand->index - basis->index;

	      if (!address_arithmetic_p && wi::neg_p (increment))
		increment = -increment;

	      j = incr_vec_index (increment);

	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fprintf (dump_file, "  Conditional candidate %d, phi: ",
			   c->cand_num);
		  print_gimple_stmt (dump_file, phi, 0, 0);
		  fputs ("  increment: ", dump_file);
		  print_decs (increment, dump_file);
		  if (j < 0)
		    fprintf (dump_file,
			     "\n  Not replaced; incr_vec overflow.\n");
		  else {
		    fprintf (dump_file, "\n  cost: %d\n", incr_vec[j].cost);
		    if (profitable_increment_p (j))
		      fputs ("  Replacing...\n", dump_file);
		    else
		      fputs ("  Not replaced.\n", dump_file);
		  }
		}

	      if (j < 0 || !profitable_increment_p (j))
		return false;
	    }
	}
    }

  return true;
}

/* Create a NOP_EXPR that copies FROM_EXPR into a new SSA name of
   type TO_TYPE, and insert it in front of the statement represented
   by candidate C.  Return the new SSA name.  */

static tree
introduce_cast_before_cand (slsr_cand_t c, tree to_type, tree from_expr)
{
  tree cast_lhs;
  gassign *cast_stmt;
  gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);

  cast_lhs = make_temp_ssa_name (to_type, NULL, "slsr");
  cast_stmt = gimple_build_assign (cast_lhs, NOP_EXPR, from_expr);
  gimple_set_location (cast_stmt, gimple_location (c->cand_stmt));
  gsi_insert_before (&gsi, cast_stmt, GSI_SAME_STMT);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fputs ("  Inserting: ", dump_file);
      print_gimple_stmt (dump_file, cast_stmt, 0, 0);
    }

  return cast_lhs;
}

/* Replace the RHS of the statement represented by candidate C with
   NEW_CODE, NEW_RHS1, and NEW_RHS2, provided that to do so doesn't
   leave C unchanged or just interchange its operands.  The original
   operation and operands are in OLD_CODE, OLD_RHS1, and OLD_RHS2.
   If the replacement was made and we are doing a details dump,
   return the revised statement, else NULL.  */

static gimple *
replace_rhs_if_not_dup (enum tree_code new_code, tree new_rhs1, tree new_rhs2,
			enum tree_code old_code, tree old_rhs1, tree old_rhs2,
			slsr_cand_t c)
{
  if (new_code != old_code
      || ((!operand_equal_p (new_rhs1, old_rhs1, 0)
	   || !operand_equal_p (new_rhs2, old_rhs2, 0))
	  && (!operand_equal_p (new_rhs1, old_rhs2, 0)
	      || !operand_equal_p (new_rhs2, old_rhs1, 0))))
    {
      gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
      gimple_assign_set_rhs_with_ops (&gsi, new_code, new_rhs1, new_rhs2);
      update_stmt (gsi_stmt (gsi));
      c->cand_stmt = gsi_stmt (gsi);

      if (dump_file && (dump_flags & TDF_DETAILS))
	return gsi_stmt (gsi);
    }

  else if (dump_file && (dump_flags & TDF_DETAILS))
    fputs ("  (duplicate, not actually replacing)\n", dump_file);

  return NULL;
}

/* Strength-reduce the statement represented by candidate C by replacing
   it with an equivalent addition or subtraction.  I is the index into
   the increment vector identifying C's increment.  BASIS_NAME
   is the rhs1 to use in creating the add/subtract.  */

static void
replace_one_candidate (slsr_cand_t c, unsigned i, tree basis_name)
{
  gimple *stmt_to_print = NULL;
  tree orig_rhs1, orig_rhs2;
  tree rhs2;
  enum tree_code orig_code, repl_code;
  widest_int cand_incr;

  orig_code = gimple_assign_rhs_code (c->cand_stmt);
  orig_rhs1 = gimple_assign_rhs1 (c->cand_stmt);
  orig_rhs2 = gimple_assign_rhs2 (c->cand_stmt);
  cand_incr = cand_increment (c);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fputs ("Replacing: ", dump_file);
      print_gimple_stmt (dump_file, c->cand_stmt, 0, 0);
      stmt_to_print = c->cand_stmt;
    }

  if (address_arithmetic_p)
    repl_code = POINTER_PLUS_EXPR;
  else
    repl_code = PLUS_EXPR;

  /* If the increment has an initializer T_0, replace the candidate
     statement with an add of the basis name and the initializer.  */
  if (incr_vec[i].initializer)
    {
      tree init_type = TREE_TYPE (incr_vec[i].initializer);
      tree orig_type = TREE_TYPE (orig_rhs2);

      if (types_compatible_p (orig_type, init_type))
	rhs2 = incr_vec[i].initializer;
      else
	rhs2 = introduce_cast_before_cand (c, orig_type,
					   incr_vec[i].initializer);

      if (incr_vec[i].incr != cand_incr)
	{
	  gcc_assert (repl_code == PLUS_EXPR);
	  repl_code = MINUS_EXPR;
	}

      stmt_to_print = replace_rhs_if_not_dup (repl_code, basis_name, rhs2,
					      orig_code, orig_rhs1, orig_rhs2,
					      c);
    }

  /* Otherwise, the increment is one of -1, 0, and 1.  Replace
     with a subtract of the stride from the basis name, a copy
     from the basis name, or an add of the stride to the basis
     name, respectively.  It may be necessary to introduce a
     cast (or reuse an existing cast).  */
  else if (cand_incr == 1)
    {
      tree stride_type = TREE_TYPE (c->stride);
      tree orig_type = TREE_TYPE (orig_rhs2);

      if (types_compatible_p (orig_type, stride_type))
	rhs2 = c->stride;
      else
	rhs2 = introduce_cast_before_cand (c, orig_type, c->stride);

      stmt_to_print = replace_rhs_if_not_dup (repl_code, basis_name, rhs2,
					      orig_code, orig_rhs1, orig_rhs2,
					      c);
    }

  else if (cand_incr == -1)
    {
      tree stride_type = TREE_TYPE (c->stride);
      tree orig_type = TREE_TYPE (orig_rhs2);
      gcc_assert (repl_code != POINTER_PLUS_EXPR);

      if (types_compatible_p (orig_type, stride_type))
	rhs2 = c->stride;
      else
	rhs2 = introduce_cast_before_cand (c, orig_type, c->stride);

      if (orig_code != MINUS_EXPR
	  || !operand_equal_p (basis_name, orig_rhs1, 0)
	  || !operand_equal_p (rhs2, orig_rhs2, 0))
	{
	  gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
	  gimple_assign_set_rhs_with_ops (&gsi, MINUS_EXPR, basis_name, rhs2);
	  update_stmt (gsi_stmt (gsi));
	  c->cand_stmt = gsi_stmt (gsi);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    stmt_to_print = gsi_stmt (gsi);
	}
      else if (dump_file && (dump_flags & TDF_DETAILS))
	fputs ("  (duplicate, not actually replacing)\n", dump_file);
    }

  else if (cand_incr == 0)
    {
      tree lhs = gimple_assign_lhs (c->cand_stmt);
      tree lhs_type = TREE_TYPE (lhs);
      tree basis_type = TREE_TYPE (basis_name);

      if (types_compatible_p (lhs_type, basis_type))
	{
	  gassign *copy_stmt = gimple_build_assign (lhs, basis_name);
	  gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
	  gimple_set_location (copy_stmt, gimple_location (c->cand_stmt));
	  gsi_replace (&gsi, copy_stmt, false);
	  c->cand_stmt = copy_stmt;

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    stmt_to_print = copy_stmt;
	}
      else
	{
	  gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
	  gassign *cast_stmt = gimple_build_assign (lhs, NOP_EXPR, basis_name);
	  gimple_set_location (cast_stmt, gimple_location (c->cand_stmt));
	  gsi_replace (&gsi, cast_stmt, false);
	  c->cand_stmt = cast_stmt;

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    stmt_to_print = cast_stmt;
	}
    }
  else
    gcc_unreachable ();

  if (dump_file && (dump_flags & TDF_DETAILS) && stmt_to_print)
    {
      fputs ("With: ", dump_file);
      print_gimple_stmt (dump_file, stmt_to_print, 0, 0);
      fputs ("\n", dump_file);
    }
}

/* For each candidate in the tree rooted at C, replace it with
   an increment if such has been shown to be profitable.  */

static void
replace_profitable_candidates (slsr_cand_t c)
{
  if (!cand_already_replaced (c))
    {
      widest_int increment = cand_abs_increment (c);
      enum tree_code orig_code = gimple_assign_rhs_code (c->cand_stmt);
      int i;

      i = incr_vec_index (increment);

      /* Only process profitable increments.  Nothing useful can be done
	 to a cast or copy.  */
      if (i >= 0
	  && profitable_increment_p (i)
	  && orig_code != SSA_NAME
	  && !CONVERT_EXPR_CODE_P (orig_code))
	{
	  if (phi_dependent_cand_p (c))
	    {
	      gimple *phi = lookup_cand (c->def_phi)->cand_stmt;

	      if (all_phi_incrs_profitable (c, phi))
		{
		  /* Look up the LHS SSA name from C's basis.  This will be
		     the RHS1 of the adds we will introduce to create new
		     phi arguments.  */
		  slsr_cand_t basis = lookup_cand (c->basis);
		  tree basis_name = gimple_assign_lhs (basis->cand_stmt);

		  /* Create a new phi statement that will represent C's true
		     basis after the transformation is complete.  */
		  location_t loc = gimple_location (c->cand_stmt);
		  tree name = create_phi_basis (c, phi, basis_name,
						loc, UNKNOWN_STRIDE);

		  /* Replace C with an add of the new basis phi and the
		     increment.  */
		  replace_one_candidate (c, i, name);
		}
	    }
	  else
	    {
	      slsr_cand_t basis = lookup_cand (c->basis);
	      tree basis_name = gimple_assign_lhs (basis->cand_stmt);
	      replace_one_candidate (c, i, basis_name);
	    }
	}
    }

  if (c->sibling)
    replace_profitable_candidates (lookup_cand (c->sibling));

  if (c->dependent)
    replace_profitable_candidates (lookup_cand (c->dependent));
}

/* Analyze costs of related candidates in the candidate vector,
   and make beneficial replacements.  */

static void
analyze_candidates_and_replace (void)
{
  unsigned i;
  slsr_cand_t c;

  /* Each candidate that has a null basis and a non-null
     dependent is the root of a tree of related statements.
     Analyze each tree to determine a subset of those
     statements that can be replaced with maximum benefit.  */
  FOR_EACH_VEC_ELT (cand_vec, i, c)
    {
      slsr_cand_t first_dep;

      if (c->basis != 0 || c->dependent == 0)
	continue;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "\nProcessing dependency tree rooted at %d.\n",
		 c->cand_num);

      first_dep = lookup_cand (c->dependent);

      /* If this is a chain of CAND_REFs, unconditionally replace
	 each of them with a strength-reduced data reference.  */
      if (c->kind == CAND_REF)
	replace_refs (c);

      /* If the common stride of all related candidates is a known
	 constant, each candidate without a phi-dependence can be
	 profitably replaced.  Each replaces a multiply by a single
	 add, with the possibility that a feeding add also goes dead.
	 A candidate with a phi-dependence is replaced only if the
	 compensation code it requires is offset by the strength
	 reduction savings.  */
      else if (TREE_CODE (c->stride) == INTEGER_CST)
	replace_uncond_cands_and_profitable_phis (first_dep);

      /* When the stride is an SSA name, it may still be profitable
	 to replace some or all of the dependent candidates, depending
	 on whether the introduced increments can be reused, or are
	 less expensive to calculate than the replaced statements.  */
      else
	{
	  machine_mode mode;
	  bool speed;

	  /* Determine whether we'll be generating pointer arithmetic
	     when replacing candidates.  */
	  address_arithmetic_p = (c->kind == CAND_ADD
				  && POINTER_TYPE_P (c->cand_type));

	  /* If all candidates have already been replaced under other
	     interpretations, nothing remains to be done.  */
	  if (!count_candidates (c))
	    continue;

	  /* Construct an array of increments for this candidate chain.  */
	  incr_vec = XNEWVEC (incr_info, MAX_INCR_VEC_LEN);
	  incr_vec_len = 0;
	  record_increments (c);

	  /* Determine which increments are profitable to replace.  */
	  mode = TYPE_MODE (TREE_TYPE (gimple_assign_lhs (c->cand_stmt)));
	  speed = optimize_cands_for_speed_p (c);
	  analyze_increments (first_dep, mode, speed);

	  /* Insert initializers of the form T_0 = stride * increment
	     for use in profitable replacements.  */
	  insert_initializers (first_dep);

	  /* Perform the replacements.  */
	  replace_profitable_candidates (first_dep);

	  free (incr_vec);
	}
    }
}

const pass_data pass_data_strength_reduction =
{
  GIMPLE_PASS, /* type */
  "slsr", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_GIMPLE_SLSR, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_strength_reduction : public gimple_opt_pass
{
public:
  pass_strength_reduction (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_strength_reduction, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return flag_tree_slsr; }
  virtual unsigned int execute (function *);

}; // class pass_strength_reduction

unsigned
pass_strength_reduction::execute (function *fun)
{
  /* Create the obstack where candidates will reside.  */
  gcc_obstack_init (&cand_obstack);

  /* Allocate the candidate vector.  */
  cand_vec.create (128);

  /* Allocate the mapping from statements to candidate indices.  */
  stmt_cand_map = new hash_map<gimple *, slsr_cand_t>;

  /* Create the obstack where candidate chains will reside.  */
  gcc_obstack_init (&chain_obstack);

  /* Allocate the mapping from base expressions to candidate chains.  */
  base_cand_map = new hash_table<cand_chain_hasher> (500);

  /* Allocate the mapping from bases to alternative bases.  */
  alt_base_map = new hash_map<tree, tree>;

  /* Initialize the loop optimizer.  We need to detect flow across
     back edges, and this gives us dominator information as well.  */
  loop_optimizer_init (AVOID_CFG_MODIFICATIONS);

  /* Walk the CFG in predominator order looking for strength reduction
     candidates.  */
  find_candidates_dom_walker (CDI_DOMINATORS)
    .walk (fun->cfg->x_entry_block_ptr);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_cand_vec ();
      dump_cand_chains ();
    }

  delete alt_base_map;
  free_affine_expand_cache (&name_expansions);

  /* Analyze costs and make appropriate replacements.  */
  analyze_candidates_and_replace ();

  loop_optimizer_finalize ();
  delete base_cand_map;
  base_cand_map = NULL;
  obstack_free (&chain_obstack, NULL);
  delete stmt_cand_map;
  cand_vec.release ();
  obstack_free (&cand_obstack, NULL);

  return 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_strength_reduction (gcc::context *ctxt)
{
  return new pass_strength_reduction (ctxt);
}