1 /* Straight-line strength reduction.
2 Copyright (C) 2012-2013 Free Software Foundation, Inc.
3 Contributed by Bill Schmidt, IBM <wschmidt@linux.ibm.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 /* There are many algorithms for performing strength reduction on
22 loops. This is not one of them. IVOPTS handles strength reduction
23 of induction variables just fine. This pass is intended to pick
24 up the crumbs it leaves behind, by considering opportunities for
25 strength reduction along dominator paths.
27 Strength reduction addresses explicit multiplies, and certain
28 multiplies implicit in addressing expressions. It would also be
29 possible to apply strength reduction to divisions and modulos,
30 but such opportunities are relatively uncommon.
32 Strength reduction is also currently restricted to integer operations.
33 If desired, it could be extended to floating-point operations under
34 control of something like -funsafe-math-optimizations. */
38 #include "coretypes.h"
41 #include "basic-block.h"
42 #include "tree-pass.h"
44 #include "gimple-pretty-print.h"
45 #include "tree-flow.h"
47 #include "pointer-set.h"
50 #include "hash-table.h"
52 /* Information about a strength reduction candidate. Each statement
53 in the candidate table represents an expression of one of the
54 following forms (the special case of CAND_REF will be described
57 (CAND_MULT) S1: X = (B + i) * S
58 (CAND_ADD) S1: X = B + (i * S)
60 Here X and B are SSA names, i is an integer constant, and S is
61 either an SSA name or a constant. We call B the "base," i the
62 "index", and S the "stride."
64 Any statement S0 that dominates S1 and is of the form:
66 (CAND_MULT) S0: Y = (B + i') * S
67 (CAND_ADD) S0: Y = B + (i' * S)
69 is called a "basis" for S1. In both cases, S1 may be replaced by
71 S1': X = Y + (i - i') * S,
73 where (i - i') * S is folded to the extent possible.
75 All gimple statements are visited in dominator order, and each
76 statement that may contribute to one of the forms of S1 above is
77 given at least one entry in the candidate table. Such statements
78 include addition, pointer addition, subtraction, multiplication,
79 negation, copies, and nontrivial type casts. If a statement may
80 represent more than one expression of the forms of S1 above,
81 multiple "interpretations" are stored in the table and chained
84 * An add of two SSA names may treat either operand as the base.
85 * A multiply of two SSA names, likewise.
86 * A copy or cast may be thought of as either a CAND_MULT with
87 i = 0 and S = 1, or as a CAND_ADD with i = 0 or S = 0.
89 Candidate records are allocated from an obstack. They are addressed
90 both from a hash table keyed on S1, and from a vector of candidate
91 pointers arranged in predominator order.
95 Currently we don't recognize:
100 as a strength reduction opportunity, even though this S1 would
101 also be replaceable by the S1' above. This can be added if it
102 comes up in practice.
104 Strength reduction in addressing
105 --------------------------------
106 There is another kind of candidate known as CAND_REF. A CAND_REF
107 describes a statement containing a memory reference having
108 complex addressing that might benefit from strength reduction.
109 Specifically, we are interested in references for which
110 get_inner_reference returns a base address, offset, and bitpos as
113 base: MEM_REF (T1, C1)
114 offset: MULT_EXPR (PLUS_EXPR (T2, C2), C3)
115 bitpos: C4 * BITS_PER_UNIT
117 Here T1 and T2 are arbitrary trees, and C1, C2, C3, C4 are
118 arbitrary integer constants. Note that C2 may be zero, in which
119 case the offset will be MULT_EXPR (T2, C3).
121 When this pattern is recognized, the original memory reference
122 can be replaced with:
124 MEM_REF (POINTER_PLUS_EXPR (T1, MULT_EXPR (T2, C3)),
127 which distributes the multiply to allow constant folding. When
128 two or more addressing expressions can be represented by MEM_REFs
129 of this form, differing only in the constants C1, C2, and C4,
130 making this substitution produces more efficient addressing during
131 the RTL phases. When there are not at least two expressions with
132 the same values of T1, T2, and C3, there is nothing to be gained
135 Strength reduction of CAND_REFs uses the same infrastructure as
136 that used by CAND_MULTs and CAND_ADDs. We record T1 in the base (B)
137 field, MULT_EXPR (T2, C3) in the stride (S) field, and
138 C1 + (C2 * C3) + C4 in the index (i) field. A basis for a CAND_REF
139 is thus another CAND_REF with the same B and S values. When at
140 least two CAND_REFs are chained together using the basis relation,
141 each of them is replaced as above, resulting in improved code
142 generation for addressing.
144 Conditional candidates
145 ======================
147 Conditional candidates are best illustrated with an example.
148 Consider the code sequence:
151 (2) a_0 = x_0 * 5; MULT (B: x_0; i: 0; S: 5)
153 (3) x_1 = x_0 + 1; ADD (B: x_0, i: 1; S: 1)
154 (4) x_2 = PHI <x_0, x_1>; PHI (B: x_0, i: 0, S: 1)
155 (5) x_3 = x_2 + 1; ADD (B: x_2, i: 1, S: 1)
156 (6) a_1 = x_3 * 5; MULT (B: x_2, i: 1; S: 5)
158 Here strength reduction is complicated by the uncertain value of x_2.
159 A legitimate transformation is:
168 (4) [x_2 = PHI <x_0, x_1>;]
169 (4a) t_2 = PHI <a_0, t_1>;
173 where the bracketed instructions may go dead.
175 To recognize this opportunity, we have to observe that statement (6)
176 has a "hidden basis" (2). The hidden basis is unlike a normal basis
177 in that the statement and the hidden basis have different base SSA
178 names (x_2 and x_0, respectively). The relationship is established
179 when a statement's base name (x_2) is defined by a phi statement (4),
180 each argument of which (x_0, x_1) has an identical "derived base name."
181 If the argument is defined by a candidate (as x_1 is by (3)) that is a
182 CAND_ADD having a stride of 1, the derived base name of the argument is
183 the base name of the candidate (x_0). Otherwise, the argument itself
184 is its derived base name (as is the case with argument x_0).
186 The hidden basis for statement (6) is the nearest dominating candidate
187 whose base name is the derived base name (x_0) of the feeding phi (4),
188 and whose stride is identical to that of the statement. We can then
189 create the new "phi basis" (4a) and feeding adds along incoming arcs (3a),
190 allowing the final replacement of (6) by the strength-reduced (6r).
192 To facilitate this, a new kind of candidate (CAND_PHI) is introduced.
193 A CAND_PHI is not a candidate for replacement, but is maintained in the
194 candidate table to ease discovery of hidden bases. Any phi statement
195 whose arguments share a common derived base name is entered into the
196 table with the derived base name, an (arbitrary) index of zero, and a
197 stride of 1. A statement with a hidden basis can then be detected by
198 simply looking up its feeding phi definition in the candidate table,
199 extracting the derived base name, and searching for a basis in the
200 usual manner after substituting the derived base name.
202 Note that the transformation is only valid when the original phi and
203 the statements that define the phi's arguments are all at the same
204 position in the loop hierarchy. */
207 /* Index into the candidate vector, offset by 1. VECs are zero-based,
208 while cand_idx's are one-based, with zero indicating null. */
209 typedef unsigned cand_idx
;
211 /* The kind of candidate. */
222 /* The candidate statement S1. */
225 /* The base expression B: often an SSA name, but not always. */
231 /* The index constant i. */
234 /* The type of the candidate. This is normally the type of base_expr,
235 but casts may have occurred when combining feeding instructions.
236 A candidate can only be a basis for candidates of the same final type.
237 (For CAND_REFs, this is the type to be used for operand 1 of the
238 replacement MEM_REF.) */
241 /* The kind of candidate (CAND_MULT, etc.). */
244 /* Index of this candidate in the candidate vector. */
247 /* Index of the next candidate record for the same statement.
248 A statement may be useful in more than one way (e.g., due to
249 commutativity). So we can have multiple "interpretations"
251 cand_idx next_interp
;
253 /* Index of the basis statement S0, if any, in the candidate vector. */
256 /* First candidate for which this candidate is a basis, if one exists. */
259 /* Next candidate having the same basis as this one. */
262 /* If this is a conditional candidate, the CAND_PHI candidate
263 that defines the base SSA name B. */
266 /* Savings that can be expected from eliminating dead code if this
267 candidate is replaced. */
271 typedef struct slsr_cand_d slsr_cand
, *slsr_cand_t
;
272 typedef const struct slsr_cand_d
*const_slsr_cand_t
;
274 /* Pointers to candidates are chained together as part of a mapping
275 from base expressions to the candidates that use them. */
279 /* Base expression for the chain of candidates: often, but not
280 always, an SSA name. */
283 /* Pointer to a candidate. */
287 struct cand_chain_d
*next
;
291 typedef struct cand_chain_d cand_chain
, *cand_chain_t
;
292 typedef const struct cand_chain_d
*const_cand_chain_t
;
294 /* Information about a unique "increment" associated with candidates
295 having an SSA name for a stride. An increment is the difference
296 between the index of the candidate and the index of its basis,
297 i.e., (i - i') as discussed in the module commentary.
299 When we are not going to generate address arithmetic we treat
300 increments that differ only in sign as the same, allowing sharing
301 of the cost of initializers. The absolute value of the increment
302 is stored in the incr_info. */
306 /* The increment that relates a candidate to its basis. */
309 /* How many times the increment occurs in the candidate tree. */
312 /* Cost of replacing candidates using this increment. Negative and
313 zero costs indicate replacement should be performed. */
316 /* If this increment is profitable but is not -1, 0, or 1, it requires
317 an initializer T_0 = stride * incr to be found or introduced in the
318 nearest common dominator of all candidates. This field holds T_0
319 for subsequent use. */
322 /* If the initializer was found to already exist, this is the block
323 where it was found. */
327 typedef struct incr_info_d incr_info
, *incr_info_t
;
329 /* Candidates are maintained in a vector. If candidate X dominates
330 candidate Y, then X appears before Y in the vector; but the
331 converse does not necessarily hold. */
332 static vec
<slsr_cand_t
> cand_vec
;
346 enum phi_adjust_status
352 enum count_phis_status
358 /* Pointer map embodying a mapping from statements to candidates. */
359 static struct pointer_map_t
*stmt_cand_map
;
361 /* Obstack for candidates. */
362 static struct obstack cand_obstack
;
364 /* Obstack for candidate chains. */
365 static struct obstack chain_obstack
;
367 /* An array INCR_VEC of incr_infos is used during analysis of related
368 candidates having an SSA name for a stride. INCR_VEC_LEN describes
369 its current length. */
370 static incr_info_t incr_vec
;
371 static unsigned incr_vec_len
;
373 /* For a chain of candidates with unknown stride, indicates whether or not
374 we must generate pointer arithmetic when replacing statements. */
375 static bool address_arithmetic_p
;
377 /* Forward function declarations. */
378 static slsr_cand_t
base_cand_from_table (tree
);
379 static tree
introduce_cast_before_cand (slsr_cand_t
, tree
, tree
, tree
*);
381 /* Produce a pointer to the IDX'th candidate in the candidate vector. */
/* NOTE(review): cand_idx values are one-based, with zero meaning "none"
   (see the typedef comment earlier in the file), so the zero-based vector
   is indexed with IDX - 1.  Callers must not pass zero.  */
384 lookup_cand (cand_idx idx
)
386 return cand_vec
[idx
- 1];
389 /* Helper for hashing a candidate chain header. */
391 struct cand_chain_hasher
: typed_noop_remove
<cand_chain
>
393 typedef cand_chain value_type
;
394 typedef cand_chain compare_type
;
395 static inline hashval_t
hash (const value_type
*);
396 static inline bool equal (const value_type
*, const compare_type
*);
/* Hash a candidate-chain header.  The hash is derived solely from the
   chain's base expression, matching cand_chain_hasher::equal below.  */
400 cand_chain_hasher::hash (const value_type
*p
)
402 tree base_expr
= p
->base_expr
;
403 return iterative_hash_expr (base_expr
, 0);
/* Two chain headers compare equal iff their base expressions are
   structurally equal per operand_equal_p.  */
407 cand_chain_hasher::equal (const value_type
*chain1
, const compare_type
*chain2
)
409 return operand_equal_p (chain1
->base_expr
, chain2
->base_expr
, 0);
412 /* Hash table embodying a mapping from base exprs to chains of candidates. */
413 static hash_table
<cand_chain_hasher
> base_cand_map
;
415 /* Look in the candidate table for a CAND_PHI that defines BASE and
416 return it if found; otherwise return NULL. */
419 find_phi_def (tree base
)
/* Only an SSA name can be defined by a phi statement; anything else
   cannot have a CAND_PHI entry.  */
423 if (TREE_CODE (base
) != SSA_NAME
)
426 c
= base_cand_from_table (base
)
;
/* NOTE(review): the early-return statements and the successful-lookup
   return are not visible in this extraction.  The result feeds
   c->def_phi, which is a cand_idx (see alloc_cand_and_find_basis and
   find_basis_for_candidate), so 0 presumably means "no CAND_PHI" --
   confirm against the full source.  */
428 if (!c
|| c
->kind
!= CAND_PHI
)
434 /* Helper routine for find_basis_for_candidate. May be called twice:
435 once for the candidate's base expr, and optionally again for the
436 candidate's phi definition. */
439 find_basis_for_base_expr (slsr_cand_t c
, tree base_expr
)
441 cand_chain mapping_key
;
443 slsr_cand_t basis
= NULL
;
445 // Limit potential of N^2 behavior for long candidate chains.
447 int max_iters
= PARAM_VALUE (PARAM_MAX_SLSR_CANDIDATE_SCAN
);
/* Probe base_cand_map with a stack-allocated key; hashing and equality
   consult only the base_expr field (see cand_chain_hasher).  */
449 mapping_key
.base_expr
= base_expr
;
450 chain
= base_cand_map
.find (&mapping_key
);
/* Scan at most MAX_ITERS candidates sharing this base expression.  */
452 for (; chain
&& iters
< max_iters
; chain
= chain
->next
, ++iters
)
454 slsr_cand_t one_basis
= chain
->cand
;
/* A potential basis must have the same kind as C, be a different
   statement, have an identical stride and compatible type, and its
   block must dominate C's block.  */
456 if (one_basis
->kind
!= c
->kind
457 || one_basis
->cand_stmt
== c
->cand_stmt
458 || !operand_equal_p (one_basis
->stride
, c
->stride
, 0)
459 || !types_compatible_p (one_basis
->cand_type
, c
->cand_type
)
460 || !dominated_by_p (CDI_DOMINATORS
,
461 gimple_bb (c
->cand_stmt
),
462 gimple_bb (one_basis
->cand_stmt
)))
/* Prefer the candidate with the highest cand_num: since candidates are
   numbered in predominator order (see the cand_vec comment), that is
   the most immediately dominating basis.  */
465 if (!basis
|| basis
->cand_num
< one_basis
->cand_num
)
472 /* Use the base expr from candidate C to look for possible candidates
473 that can serve as a basis for C. Each potential basis must also
474 appear in a block that dominates the candidate statement and have
475 the same stride and type. If more than one possible basis exists,
476 the one with highest index in the vector is chosen; this will be
477 the most immediately dominating basis. */
480 find_basis_for_candidate (slsr_cand_t c
)
482 slsr_cand_t basis
= find_basis_for_base_expr (c
, c
->base_expr
);
484 /* If a candidate doesn't have a basis using its base expression,
485 it may have a basis hidden by one or more intervening phis. */
486 if (!basis
&& c
->def_phi
)
488 basic_block basis_bb
, phi_bb
;
489 slsr_cand_t phi_cand
= lookup_cand (c
->def_phi
);
490 basis
= find_basis_for_base_expr (c
, phi_cand
->base_expr
);
494 /* A hidden basis must dominate the phi-definition of the
495 candidate's base name. */
496 phi_bb
= gimple_bb (phi_cand
->cand_stmt
);
497 basis_bb
= gimple_bb (basis
->cand_stmt
);
499 if (phi_bb
== basis_bb
500 || !dominated_by_p (CDI_DOMINATORS
, phi_bb
, basis_bb
))
506 /* If we found a hidden basis, estimate additional dead-code
507 savings if the phi and its feeding statements can be removed. */
508 if (basis
&& has_single_use (gimple_phi_result (phi_cand
->cand_stmt
)))
509 c
->dead_savings
+= phi_cand
->dead_savings
;
515 c
->sibling
= basis
->dependent
;
516 basis
->dependent
= c
->cand_num
;
517 return basis
->cand_num
;
523 /* Record a mapping from the base expression of C to C itself, indicating that
524 C may potentially serve as a basis using that base expression. */
527 record_potential_basis (slsr_cand_t c
)
/* Chain nodes live on chain_obstack; they are released en masse with
   the obstack, never freed individually.  */
532 node
= (cand_chain_t
) obstack_alloc (&chain_obstack
, sizeof (cand_chain
));
533 node
->base_expr
= c
->base_expr
;
/* Find (or create, via INSERT) the hash-table slot for this base
   expression in base_cand_map.  */
536 slot
= base_cand_map
.find_slot (node
, INSERT
);
/* A chain already exists for this base expression: splice the new node
   in immediately after the existing head.  (The empty-slot branch is
   not visible in this extraction.)  */
540 cand_chain_t head
= (cand_chain_t
) (*slot
);
541 node
->next
= head
->next
;
548 /* Allocate storage for a new candidate and initialize its fields.
549 Attempt to find a basis for the candidate. */
552 alloc_cand_and_find_basis (enum cand_kind kind
, gimple gs
, tree base
,
553 double_int index
, tree stride
, tree ctype
,
556 slsr_cand_t c
= (slsr_cand_t
) obstack_alloc (&cand_obstack
,
562 c
->cand_type
= ctype
;
564 c
->cand_num
= cand_vec
.length () + 1;
568 c
->def_phi
= find_phi_def (base
);
569 c
->dead_savings
= savings
;
571 cand_vec
.safe_push (c
);
573 if (kind
== CAND_PHI
)
576 c
->basis
= find_basis_for_candidate (c
);
578 record_potential_basis (c
);
583 /* Determine the target cost of statement GS when compiling according
/* Returns a target cost estimate for the single GIMPLE assignment GS;
   SPEED selects optimize-for-speed vs. optimize-for-size costing.  */
587 stmt_cost (gimple gs
, bool speed
)
589 tree lhs
, rhs1
, rhs2
;
590 enum machine_mode lhs_mode
;
592 gcc_assert (is_gimple_assign (gs
));
593 lhs
= gimple_assign_lhs (gs
);
594 rhs1
= gimple_assign_rhs1 (gs
);
595 lhs_mode
= TYPE_MODE (TREE_TYPE (lhs
));
597 switch (gimple_assign_rhs_code (gs
))
/* Multiply: a multiply by a host-representable constant is costed via
   mult_by_coeff_cost; a general multiply via mul_cost.  */
600 rhs2
= gimple_assign_rhs2 (gs
);
602 if (host_integerp (rhs2
, 0))
603 return mult_by_coeff_cost (TREE_INT_CST_LOW (rhs2
), lhs_mode
, speed
);
605 gcc_assert (TREE_CODE (rhs1
) != INTEGER_CST
);
606 return mul_cost (speed
, lhs_mode
);
/* Additions, including pointer-plus, use the target's add cost.  */
609 case POINTER_PLUS_EXPR
:
611 return add_cost (speed
, lhs_mode
);
/* Negation.  */
614 return neg_cost (speed
, lhs_mode
);
/* Conversions are costed by source and destination machine modes.  */
617 return convert_cost (lhs_mode
, TYPE_MODE (TREE_TYPE (rhs1
)), speed
);
619 /* Note that we don't assign costs to copies that in most cases
629 /* Look up the defining statement for BASE_IN and return a pointer
630 to its candidate in the candidate table, if any; otherwise NULL.
631 Only CAND_ADD and CAND_MULT candidates are returned. */
634 base_cand_from_table (tree base_in
)
/* NOTE(review): the guard that BASE_IN is an SSA name (required before
   SSA_NAME_DEF_STMT) and the successful-lookup return are not visible
   in this extraction -- confirm against the full source.  */
638 gimple def
= SSA_NAME_DEF_STMT (base_in
);
640 return (slsr_cand_t
) NULL
;
/* Consult the statement->candidate map for the defining statement.  */
642 result
= (slsr_cand_t
*) pointer_map_contains (stmt_cand_map
, def
);
/* CAND_REF entries are deliberately excluded, per the contract above.  */
644 if (result
&& (*result
)->kind
!= CAND_REF
)
647 return (slsr_cand_t
) NULL
;
650 /* Add an entry to the statement-to-candidate mapping. */
653 add_cand_for_stmt (gimple gs
, slsr_cand_t c
)
/* pointer_map_insert returns the (possibly freshly created) slot for
   GS.  NOTE(review): the store of C into *slot is not visible in this
   extraction.  */
655 void **slot
= pointer_map_insert (stmt_cand_map
, gs
);
660 // FORNOW: Disable conditional candidate processing until bootstrap
661 // issue can be sorted out for i686-pc-linux-gnu.
663 /* Given PHI which contains a phi statement, determine whether it
664 satisfies all the requirements of a phi candidate. If so, create
665 a candidate. Note that a CAND_PHI never has a basis itself, but
666 is used to help find a basis for subsequent candidates. */
669 slsr_process_phi (gimple phi
, bool speed
)
672 tree arg0_base
= NULL_TREE
, base_type
;
674 struct loop
*cand_loop
= gimple_bb (phi
)->loop_father
;
675 unsigned savings
= 0;
677 /* A CAND_PHI requires each of its arguments to have the same
678 derived base name. (See the module header commentary for a
679 definition of derived base names.) Furthermore, all feeding
680 definitions must be in the same position in the loop hierarchy
683 for (i
= 0; i
< gimple_phi_num_args (phi
); i
++)
685 slsr_cand_t arg_cand
;
686 tree arg
= gimple_phi_arg_def (phi
, i
);
687 tree derived_base_name
= NULL_TREE
;
688 gimple arg_stmt
= NULL
;
689 basic_block arg_bb
= NULL
;
691 if (TREE_CODE (arg
) != SSA_NAME
)
694 arg_cand
= base_cand_from_table (arg
);
698 while (arg_cand
->kind
!= CAND_ADD
&& arg_cand
->kind
!= CAND_PHI
)
700 if (!arg_cand
->next_interp
)
703 arg_cand
= lookup_cand (arg_cand
->next_interp
);
706 if (!integer_onep (arg_cand
->stride
))
709 derived_base_name
= arg_cand
->base_expr
;
710 arg_stmt
= arg_cand
->cand_stmt
;
711 arg_bb
= gimple_bb (arg_stmt
);
713 /* Gather potential dead code savings if the phi statement
714 can be removed later on. */
715 if (has_single_use (arg
))
717 if (gimple_code (arg_stmt
) == GIMPLE_PHI
)
718 savings
+= arg_cand
->dead_savings
;
720 savings
+= stmt_cost (arg_stmt
, speed
);
725 derived_base_name
= arg
;
727 if (SSA_NAME_IS_DEFAULT_DEF (arg
))
728 arg_bb
= single_succ (ENTRY_BLOCK_PTR
);
730 gimple_bb (SSA_NAME_DEF_STMT (arg
));
733 if (!arg_bb
|| arg_bb
->loop_father
!= cand_loop
)
737 arg0_base
= derived_base_name
;
738 else if (!operand_equal_p (derived_base_name
, arg0_base
, 0))
742 /* Create the candidate. "alloc_cand_and_find_basis" is named
743 misleadingly for this case, as no basis will be sought for a
745 base_type
= TREE_TYPE (arg0_base
);
747 c
= alloc_cand_and_find_basis (CAND_PHI
, phi
, arg0_base
, double_int_zero
,
748 integer_one_node
, base_type
, savings
);
750 /* Add the candidate to the statement-candidate mapping. */
751 add_cand_for_stmt (phi
, c
);
755 /* Look for the following pattern:
757 *PBASE: MEM_REF (T1, C1)
759 *POFFSET: MULT_EXPR (T2, C3) [C2 is zero]
761 MULT_EXPR (PLUS_EXPR (T2, C2), C3)
763 MULT_EXPR (MINUS_EXPR (T2, -C2), C3)
765 *PINDEX: C4 * BITS_PER_UNIT
767 If not present, leave the input values unchanged and return FALSE.
768 Otherwise, modify the input values as follows and return TRUE:
771 *POFFSET: MULT_EXPR (T2, C3)
772 *PINDEX: C1 + (C2 * C3) + C4 */
775 restructure_reference (tree
*pbase
, tree
*poffset
, double_int
*pindex
,
778 tree base
= *pbase
, offset
= *poffset
;
779 double_int index
= *pindex
;
780 double_int bpu
= double_int::from_uhwi (BITS_PER_UNIT
);
781 tree mult_op0
, mult_op1
, t1
, t2
, type
;
782 double_int c1
, c2
, c3
, c4
;
786 || TREE_CODE (base
) != MEM_REF
787 || TREE_CODE (offset
) != MULT_EXPR
788 || TREE_CODE (TREE_OPERAND (offset
, 1)) != INTEGER_CST
789 || !index
.umod (bpu
, FLOOR_MOD_EXPR
).is_zero ())
792 t1
= TREE_OPERAND (base
, 0);
793 c1
= mem_ref_offset (base
);
794 type
= TREE_TYPE (TREE_OPERAND (base
, 1));
796 mult_op0
= TREE_OPERAND (offset
, 0);
797 mult_op1
= TREE_OPERAND (offset
, 1);
799 c3
= tree_to_double_int (mult_op1
);
801 if (TREE_CODE (mult_op0
) == PLUS_EXPR
)
803 if (TREE_CODE (TREE_OPERAND (mult_op0
, 1)) == INTEGER_CST
)
805 t2
= TREE_OPERAND (mult_op0
, 0);
806 c2
= tree_to_double_int (TREE_OPERAND (mult_op0
, 1));
811 else if (TREE_CODE (mult_op0
) == MINUS_EXPR
)
813 if (TREE_CODE (TREE_OPERAND (mult_op0
, 1)) == INTEGER_CST
)
815 t2
= TREE_OPERAND (mult_op0
, 0);
816 c2
= -tree_to_double_int (TREE_OPERAND (mult_op0
, 1));
824 c2
= double_int_zero
;
827 c4
= index
.udiv (bpu
, FLOOR_DIV_EXPR
);
830 *poffset
= fold_build2 (MULT_EXPR
, sizetype
, t2
,
831 double_int_to_tree (sizetype
, c3
));
832 *pindex
= c1
+ c2
* c3
+ c4
;
838 /* Given GS which contains a data reference, create a CAND_REF entry in
839 the candidate table and attempt to find a basis. */
842 slsr_process_ref (gimple gs
)
844 tree ref_expr
, base
, offset
, type
;
845 HOST_WIDE_INT bitsize
, bitpos
;
846 enum machine_mode mode
;
847 int unsignedp
, volatilep
;
851 if (gimple_vdef (gs
))
852 ref_expr
= gimple_assign_lhs (gs
);
854 ref_expr
= gimple_assign_rhs1 (gs
);
856 if (!handled_component_p (ref_expr
)
857 || TREE_CODE (ref_expr
) == BIT_FIELD_REF
858 || (TREE_CODE (ref_expr
) == COMPONENT_REF
859 && DECL_BIT_FIELD (TREE_OPERAND (ref_expr
, 1))))
862 base
= get_inner_reference (ref_expr
, &bitsize
, &bitpos
, &offset
, &mode
,
863 &unsignedp
, &volatilep
, false);
864 index
= double_int::from_uhwi (bitpos
);
866 if (!restructure_reference (&base
, &offset
, &index
, &type
))
869 c
= alloc_cand_and_find_basis (CAND_REF
, gs
, base
, index
, offset
,
872 /* Add the candidate to the statement-candidate mapping. */
873 add_cand_for_stmt (gs
, c
);
876 /* Create a candidate entry for a statement GS, where GS multiplies
877 two SSA names BASE_IN and STRIDE_IN. Propagate any known information
878 about the two SSA names into the new candidate. Return the new
882 create_mul_ssa_cand (gimple gs
, tree base_in
, tree stride_in
, bool speed
)
884 tree base
= NULL_TREE
, stride
= NULL_TREE
, ctype
= NULL_TREE
;
886 unsigned savings
= 0;
888 slsr_cand_t base_cand
= base_cand_from_table (base_in
);
890 /* Look at all interpretations of the base candidate, if necessary,
891 to find information to propagate into this candidate. */
892 while (base_cand
&& !base
&& base_cand
->kind
!= CAND_PHI
)
895 if (base_cand
->kind
== CAND_MULT
&& integer_onep (base_cand
->stride
))
901 base
= base_cand
->base_expr
;
902 index
= base_cand
->index
;
904 ctype
= base_cand
->cand_type
;
905 if (has_single_use (base_in
))
906 savings
= (base_cand
->dead_savings
907 + stmt_cost (base_cand
->cand_stmt
, speed
));
909 else if (base_cand
->kind
== CAND_ADD
910 && TREE_CODE (base_cand
->stride
) == INTEGER_CST
)
912 /* Y = B + (i' * S), S constant
914 ============================
915 X = B + ((i' * S) * Z) */
916 base
= base_cand
->base_expr
;
917 index
= base_cand
->index
* tree_to_double_int (base_cand
->stride
);
919 ctype
= base_cand
->cand_type
;
920 if (has_single_use (base_in
))
921 savings
= (base_cand
->dead_savings
922 + stmt_cost (base_cand
->cand_stmt
, speed
));
925 if (base_cand
->next_interp
)
926 base_cand
= lookup_cand (base_cand
->next_interp
);
933 /* No interpretations had anything useful to propagate, so
934 produce X = (Y + 0) * Z. */
936 index
= double_int_zero
;
938 ctype
= TREE_TYPE (base_in
);
941 c
= alloc_cand_and_find_basis (CAND_MULT
, gs
, base
, index
, stride
,
946 /* Create a candidate entry for a statement GS, where GS multiplies
947 SSA name BASE_IN by constant STRIDE_IN. Propagate any known
948 information about BASE_IN into the new candidate. Return the new
952 create_mul_imm_cand (gimple gs
, tree base_in
, tree stride_in
, bool speed
)
954 tree base
= NULL_TREE
, stride
= NULL_TREE
, ctype
= NULL_TREE
;
955 double_int index
, temp
;
956 unsigned savings
= 0;
958 slsr_cand_t base_cand
= base_cand_from_table (base_in
);
960 /* Look at all interpretations of the base candidate, if necessary,
961 to find information to propagate into this candidate. */
962 while (base_cand
&& !base
&& base_cand
->kind
!= CAND_PHI
)
964 if (base_cand
->kind
== CAND_MULT
965 && TREE_CODE (base_cand
->stride
) == INTEGER_CST
)
967 /* Y = (B + i') * S, S constant
969 ============================
970 X = (B + i') * (S * c) */
971 base
= base_cand
->base_expr
;
972 index
= base_cand
->index
;
973 temp
= tree_to_double_int (base_cand
->stride
)
974 * tree_to_double_int (stride_in
);
975 stride
= double_int_to_tree (TREE_TYPE (stride_in
), temp
);
976 ctype
= base_cand
->cand_type
;
977 if (has_single_use (base_in
))
978 savings
= (base_cand
->dead_savings
979 + stmt_cost (base_cand
->cand_stmt
, speed
));
981 else if (base_cand
->kind
== CAND_ADD
&& integer_onep (base_cand
->stride
))
985 ===========================
987 base
= base_cand
->base_expr
;
988 index
= base_cand
->index
;
990 ctype
= base_cand
->cand_type
;
991 if (has_single_use (base_in
))
992 savings
= (base_cand
->dead_savings
993 + stmt_cost (base_cand
->cand_stmt
, speed
));
995 else if (base_cand
->kind
== CAND_ADD
996 && base_cand
->index
.is_one ()
997 && TREE_CODE (base_cand
->stride
) == INTEGER_CST
)
999 /* Y = B + (1 * S), S constant
1001 ===========================
1003 base
= base_cand
->base_expr
;
1004 index
= tree_to_double_int (base_cand
->stride
);
1006 ctype
= base_cand
->cand_type
;
1007 if (has_single_use (base_in
))
1008 savings
= (base_cand
->dead_savings
1009 + stmt_cost (base_cand
->cand_stmt
, speed
));
1012 if (base_cand
->next_interp
)
1013 base_cand
= lookup_cand (base_cand
->next_interp
);
1020 /* No interpretations had anything useful to propagate, so
1021 produce X = (Y + 0) * c. */
1023 index
= double_int_zero
;
1025 ctype
= TREE_TYPE (base_in
);
1028 c
= alloc_cand_and_find_basis (CAND_MULT
, gs
, base
, index
, stride
,
1033 /* Given GS which is a multiply of scalar integers, make an appropriate
1034 entry in the candidate table. If this is a multiply of two SSA names,
1035 create two CAND_MULT interpretations and attempt to find a basis for
1036 each of them. Otherwise, create a single CAND_MULT and attempt to
1040 slsr_process_mul (gimple gs
, tree rhs1
, tree rhs2
, bool speed
)
1044 /* If this is a multiply of an SSA name with itself, it is highly
1045 unlikely that we will get a strength reduction opportunity, so
1046 don't record it as a candidate. This simplifies the logic for
1047 finding a basis, so if this is removed that must be considered. */
1051 if (TREE_CODE (rhs2
) == SSA_NAME
)
1053 /* Record an interpretation of this statement in the candidate table
1054 assuming RHS1 is the base expression and RHS2 is the stride. */
1055 c
= create_mul_ssa_cand (gs
, rhs1
, rhs2
, speed
);
1057 /* Add the first interpretation to the statement-candidate mapping. */
1058 add_cand_for_stmt (gs
, c
);
1060 /* Record another interpretation of this statement assuming RHS1
1061 is the stride and RHS2 is the base expression. */
1062 c2
= create_mul_ssa_cand (gs
, rhs2
, rhs1
, speed
);
1063 c
->next_interp
= c2
->cand_num
;
1067 /* Record an interpretation for the multiply-immediate. */
1068 c
= create_mul_imm_cand (gs
, rhs1
, rhs2
, speed
);
1070 /* Add the interpretation to the statement-candidate mapping. */
1071 add_cand_for_stmt (gs
, c
);
1075 /* Create a candidate entry for a statement GS, where GS adds two
1076 SSA names BASE_IN and ADDEND_IN if SUBTRACT_P is false, and
1077 subtracts ADDEND_IN from BASE_IN otherwise. Propagate any known
1078 information about the two SSA names into the new candidate.
1079 Return the new candidate. */
1082 create_add_ssa_cand (gimple gs
, tree base_in
, tree addend_in
,
1083 bool subtract_p
, bool speed
)
1085 tree base
= NULL_TREE
, stride
= NULL_TREE
, ctype
= NULL
;
1087 unsigned savings
= 0;
1089 slsr_cand_t base_cand
= base_cand_from_table (base_in
);
1090 slsr_cand_t addend_cand
= base_cand_from_table (addend_in
);
1092 /* The most useful transformation is a multiply-immediate feeding
1093 an add or subtract. Look for that first. */
1094 while (addend_cand
&& !base
&& addend_cand
->kind
!= CAND_PHI
)
1096 if (addend_cand
->kind
== CAND_MULT
1097 && addend_cand
->index
.is_zero ()
1098 && TREE_CODE (addend_cand
->stride
) == INTEGER_CST
)
1100 /* Z = (B + 0) * S, S constant
1102 ===========================
1103 X = Y + ((+/-1 * S) * B) */
1105 index
= tree_to_double_int (addend_cand
->stride
);
1108 stride
= addend_cand
->base_expr
;
1109 ctype
= TREE_TYPE (base_in
);
1110 if (has_single_use (addend_in
))
1111 savings
= (addend_cand
->dead_savings
1112 + stmt_cost (addend_cand
->cand_stmt
, speed
));
1115 if (addend_cand
->next_interp
)
1116 addend_cand
= lookup_cand (addend_cand
->next_interp
);
1121 while (base_cand
&& !base
&& base_cand
->kind
!= CAND_PHI
)
1123 if (base_cand
->kind
== CAND_ADD
1124 && (base_cand
->index
.is_zero ()
1125 || operand_equal_p (base_cand
->stride
,
1126 integer_zero_node
, 0)))
1128 /* Y = B + (i' * S), i' * S = 0
1130 ============================
1131 X = B + (+/-1 * Z) */
1132 base
= base_cand
->base_expr
;
1133 index
= subtract_p
? double_int_minus_one
: double_int_one
;
1135 ctype
= base_cand
->cand_type
;
1136 if (has_single_use (base_in
))
1137 savings
= (base_cand
->dead_savings
1138 + stmt_cost (base_cand
->cand_stmt
, speed
));
1140 else if (subtract_p
)
1142 slsr_cand_t subtrahend_cand
= base_cand_from_table (addend_in
);
1144 while (subtrahend_cand
&& !base
&& subtrahend_cand
->kind
!= CAND_PHI
)
1146 if (subtrahend_cand
->kind
== CAND_MULT
1147 && subtrahend_cand
->index
.is_zero ()
1148 && TREE_CODE (subtrahend_cand
->stride
) == INTEGER_CST
)
1150 /* Z = (B + 0) * S, S constant
1152 ===========================
1153 Value: X = Y + ((-1 * S) * B) */
1155 index
= tree_to_double_int (subtrahend_cand
->stride
);
1157 stride
= subtrahend_cand
->base_expr
;
1158 ctype
= TREE_TYPE (base_in
);
1159 if (has_single_use (addend_in
))
1160 savings
= (subtrahend_cand
->dead_savings
1161 + stmt_cost (subtrahend_cand
->cand_stmt
, speed
));
1164 if (subtrahend_cand
->next_interp
)
1165 subtrahend_cand
= lookup_cand (subtrahend_cand
->next_interp
);
1167 subtrahend_cand
= NULL
;
1171 if (base_cand
->next_interp
)
1172 base_cand
= lookup_cand (base_cand
->next_interp
);
1179 /* No interpretations had anything useful to propagate, so
1180 produce X = Y + (1 * Z). */
1182 index
= subtract_p
? double_int_minus_one
: double_int_one
;
1184 ctype
= TREE_TYPE (base_in
);
1187 c
= alloc_cand_and_find_basis (CAND_ADD
, gs
, base
, index
, stride
,
1192 /* Create a candidate entry for a statement GS, where GS adds SSA
1193 name BASE_IN to constant INDEX_IN. Propagate any known information
1194 about BASE_IN into the new candidate. Return the new candidate. */
1197 create_add_imm_cand (gimple gs
, tree base_in
, double_int index_in
, bool speed
)
1199 enum cand_kind kind
= CAND_ADD
;
1200 tree base
= NULL_TREE
, stride
= NULL_TREE
, ctype
= NULL_TREE
;
1201 double_int index
, multiple
;
1202 unsigned savings
= 0;
1204 slsr_cand_t base_cand
= base_cand_from_table (base_in
);
1206 while (base_cand
&& !base
&& base_cand
->kind
!= CAND_PHI
)
1208 bool unsigned_p
= TYPE_UNSIGNED (TREE_TYPE (base_cand
->stride
));
1210 if (TREE_CODE (base_cand
->stride
) == INTEGER_CST
1211 && index_in
.multiple_of (tree_to_double_int (base_cand
->stride
),
1212 unsigned_p
, &multiple
))
1214 /* Y = (B + i') * S, S constant, c = kS for some integer k
1216 ============================
1217 X = (B + (i'+ k)) * S
1219 Y = B + (i' * S), S constant, c = kS for some integer k
1221 ============================
1222 X = (B + (i'+ k)) * S */
1223 kind
= base_cand
->kind
;
1224 base
= base_cand
->base_expr
;
1225 index
= base_cand
->index
+ multiple
;
1226 stride
= base_cand
->stride
;
1227 ctype
= base_cand
->cand_type
;
1228 if (has_single_use (base_in
))
1229 savings
= (base_cand
->dead_savings
1230 + stmt_cost (base_cand
->cand_stmt
, speed
));
1233 if (base_cand
->next_interp
)
1234 base_cand
= lookup_cand (base_cand
->next_interp
);
1241 /* No interpretations had anything useful to propagate, so
1242 produce X = Y + (c * 1). */
1246 stride
= integer_one_node
;
1247 ctype
= TREE_TYPE (base_in
);
1250 c
= alloc_cand_and_find_basis (kind
, gs
, base
, index
, stride
,
1255 /* Given GS which is an add or subtract of scalar integers or pointers,
1256 make at least one appropriate entry in the candidate table. */
1259 slsr_process_add (gimple gs
, tree rhs1
, tree rhs2
, bool speed
)
1261 bool subtract_p
= gimple_assign_rhs_code (gs
) == MINUS_EXPR
;
1262 slsr_cand_t c
= NULL
, c2
;
1264 if (TREE_CODE (rhs2
) == SSA_NAME
)
1266 /* First record an interpretation assuming RHS1 is the base expression
1267 and RHS2 is the stride. But it doesn't make sense for the
1268 stride to be a pointer, so don't record a candidate in that case. */
1269 if (!POINTER_TYPE_P (TREE_TYPE (rhs2
)))
1271 c
= create_add_ssa_cand (gs
, rhs1
, rhs2
, subtract_p
, speed
);
1273 /* Add the first interpretation to the statement-candidate
1275 add_cand_for_stmt (gs
, c
);
1278 /* If the two RHS operands are identical, or this is a subtract,
1280 if (operand_equal_p (rhs1
, rhs2
, 0) || subtract_p
)
1283 /* Otherwise, record another interpretation assuming RHS2 is the
1284 base expression and RHS1 is the stride, again provided that the
1285 stride is not a pointer. */
1286 if (!POINTER_TYPE_P (TREE_TYPE (rhs1
)))
1288 c2
= create_add_ssa_cand (gs
, rhs2
, rhs1
, false, speed
);
1290 c
->next_interp
= c2
->cand_num
;
1292 add_cand_for_stmt (gs
, c2
);
1299 /* Record an interpretation for the add-immediate. */
1300 index
= tree_to_double_int (rhs2
);
1304 c
= create_add_imm_cand (gs
, rhs1
, index
, speed
);
1306 /* Add the interpretation to the statement-candidate mapping. */
1307 add_cand_for_stmt (gs
, c
);
1311 /* Given GS which is a negate of a scalar integer, make an appropriate
1312 entry in the candidate table. A negate is equivalent to a multiply
1316 slsr_process_neg (gimple gs
, tree rhs1
, bool speed
)
1318 /* Record a CAND_MULT interpretation for the multiply by -1. */
1319 slsr_cand_t c
= create_mul_imm_cand (gs
, rhs1
, integer_minus_one_node
, speed
);
1321 /* Add the interpretation to the statement-candidate mapping. */
1322 add_cand_for_stmt (gs
, c
);
1325 /* Help function for legal_cast_p, operating on two trees. Checks
1326 whether it's allowable to cast from RHS to LHS. See legal_cast_p
1327 for more details. */
1330 legal_cast_p_1 (tree lhs
, tree rhs
)
1332 tree lhs_type
, rhs_type
;
1333 unsigned lhs_size
, rhs_size
;
1334 bool lhs_wraps
, rhs_wraps
;
1336 lhs_type
= TREE_TYPE (lhs
);
1337 rhs_type
= TREE_TYPE (rhs
);
1338 lhs_size
= TYPE_PRECISION (lhs_type
);
1339 rhs_size
= TYPE_PRECISION (rhs_type
);
1340 lhs_wraps
= TYPE_OVERFLOW_WRAPS (lhs_type
);
1341 rhs_wraps
= TYPE_OVERFLOW_WRAPS (rhs_type
);
1343 if (lhs_size
< rhs_size
1344 || (rhs_wraps
&& !lhs_wraps
)
1345 || (rhs_wraps
&& lhs_wraps
&& rhs_size
!= lhs_size
))
1351 /* Return TRUE if GS is a statement that defines an SSA name from
1352 a conversion and is legal for us to combine with an add and multiply
1353 in the candidate table. For example, suppose we have:
1359 Without the type-cast, we would create a CAND_MULT for D with base B,
1360 index i, and stride S. We want to record this candidate only if it
1361 is equivalent to apply the type cast following the multiply:
1367 We will record the type with the candidate for D. This allows us
1368 to use a similar previous candidate as a basis. If we have earlier seen
1374 we can replace D with
1376 D = D' + (i - i') * S;
1378 But if moving the type-cast would change semantics, we mustn't do this.
1380 This is legitimate for casts from a non-wrapping integral type to
1381 any integral type of the same or larger size. It is not legitimate
1382 to convert a wrapping type to a non-wrapping type, or to a wrapping
1383 type of a different size. I.e., with a wrapping type, we must
1384 assume that the addition B + i could wrap, in which case performing
1385 the multiply before or after one of the "illegal" type casts will
1386 have different semantics. */
1389 legal_cast_p (gimple gs
, tree rhs
)
1391 if (!is_gimple_assign (gs
)
1392 || !CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (gs
)))
1395 return legal_cast_p_1 (gimple_assign_lhs (gs
), rhs
);
1398 /* Given GS which is a cast to a scalar integer type, determine whether
1399 the cast is legal for strength reduction. If so, make at least one
1400 appropriate entry in the candidate table. */
1403 slsr_process_cast (gimple gs
, tree rhs1
, bool speed
)
1406 slsr_cand_t base_cand
, c
, c2
;
1407 unsigned savings
= 0;
1409 if (!legal_cast_p (gs
, rhs1
))
1412 lhs
= gimple_assign_lhs (gs
);
1413 base_cand
= base_cand_from_table (rhs1
);
1414 ctype
= TREE_TYPE (lhs
);
1416 if (base_cand
&& base_cand
->kind
!= CAND_PHI
)
1420 /* Propagate all data from the base candidate except the type,
1421 which comes from the cast, and the base candidate's cast,
1422 which is no longer applicable. */
1423 if (has_single_use (rhs1
))
1424 savings
= (base_cand
->dead_savings
1425 + stmt_cost (base_cand
->cand_stmt
, speed
));
1427 c
= alloc_cand_and_find_basis (base_cand
->kind
, gs
,
1428 base_cand
->base_expr
,
1429 base_cand
->index
, base_cand
->stride
,
1431 if (base_cand
->next_interp
)
1432 base_cand
= lookup_cand (base_cand
->next_interp
);
1439 /* If nothing is known about the RHS, create fresh CAND_ADD and
1440 CAND_MULT interpretations:
1445 The first of these is somewhat arbitrary, but the choice of
1446 1 for the stride simplifies the logic for propagating casts
1448 c
= alloc_cand_and_find_basis (CAND_ADD
, gs
, rhs1
, double_int_zero
,
1449 integer_one_node
, ctype
, 0);
1450 c2
= alloc_cand_and_find_basis (CAND_MULT
, gs
, rhs1
, double_int_zero
,
1451 integer_one_node
, ctype
, 0);
1452 c
->next_interp
= c2
->cand_num
;
1455 /* Add the first (or only) interpretation to the statement-candidate
1457 add_cand_for_stmt (gs
, c
);
1460 /* Given GS which is a copy of a scalar integer type, make at least one
1461 appropriate entry in the candidate table.
1463 This interface is included for completeness, but is unnecessary
1464 if this pass immediately follows a pass that performs copy
1465 propagation, such as DOM. */
1468 slsr_process_copy (gimple gs
, tree rhs1
, bool speed
)
1470 slsr_cand_t base_cand
, c
, c2
;
1471 unsigned savings
= 0;
1473 base_cand
= base_cand_from_table (rhs1
);
1475 if (base_cand
&& base_cand
->kind
!= CAND_PHI
)
1479 /* Propagate all data from the base candidate. */
1480 if (has_single_use (rhs1
))
1481 savings
= (base_cand
->dead_savings
1482 + stmt_cost (base_cand
->cand_stmt
, speed
));
1484 c
= alloc_cand_and_find_basis (base_cand
->kind
, gs
,
1485 base_cand
->base_expr
,
1486 base_cand
->index
, base_cand
->stride
,
1487 base_cand
->cand_type
, savings
);
1488 if (base_cand
->next_interp
)
1489 base_cand
= lookup_cand (base_cand
->next_interp
);
1496 /* If nothing is known about the RHS, create fresh CAND_ADD and
1497 CAND_MULT interpretations:
1502 The first of these is somewhat arbitrary, but the choice of
1503 1 for the stride simplifies the logic for propagating casts
1505 c
= alloc_cand_and_find_basis (CAND_ADD
, gs
, rhs1
, double_int_zero
,
1506 integer_one_node
, TREE_TYPE (rhs1
), 0);
1507 c2
= alloc_cand_and_find_basis (CAND_MULT
, gs
, rhs1
, double_int_zero
,
1508 integer_one_node
, TREE_TYPE (rhs1
), 0);
1509 c
->next_interp
= c2
->cand_num
;
1512 /* Add the first (or only) interpretation to the statement-candidate
1514 add_cand_for_stmt (gs
, c
);
1517 /* Find strength-reduction candidates in block BB. */
1520 find_candidates_in_block (struct dom_walk_data
*walk_data ATTRIBUTE_UNUSED
,
1523 bool speed
= optimize_bb_for_speed_p (bb
);
1524 gimple_stmt_iterator gsi
;
1526 // FORNOW: Disable conditional candidate processing until bootstrap
1527 // issue can be sorted out for i686-pc-linux-gnu.
1529 for (gsi
= gsi_start_phis (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
1530 slsr_process_phi (gsi_stmt (gsi
), speed
);
1533 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
1535 gimple gs
= gsi_stmt (gsi
);
1537 if (gimple_vuse (gs
) && gimple_assign_single_p (gs
))
1538 slsr_process_ref (gs
);
1540 else if (is_gimple_assign (gs
)
1541 && SCALAR_INT_MODE_P
1542 (TYPE_MODE (TREE_TYPE (gimple_assign_lhs (gs
)))))
1544 tree rhs1
= NULL_TREE
, rhs2
= NULL_TREE
;
1546 switch (gimple_assign_rhs_code (gs
))
1550 rhs1
= gimple_assign_rhs1 (gs
);
1551 rhs2
= gimple_assign_rhs2 (gs
);
1552 /* Should never happen, but currently some buggy situations
1553 in earlier phases put constants in rhs1. */
1554 if (TREE_CODE (rhs1
) != SSA_NAME
)
1558 /* Possible future opportunity: rhs1 of a ptr+ can be
1560 case POINTER_PLUS_EXPR
:
1562 rhs2
= gimple_assign_rhs2 (gs
);
1568 rhs1
= gimple_assign_rhs1 (gs
);
1569 if (TREE_CODE (rhs1
) != SSA_NAME
)
1577 switch (gimple_assign_rhs_code (gs
))
1580 slsr_process_mul (gs
, rhs1
, rhs2
, speed
);
1584 case POINTER_PLUS_EXPR
:
1586 slsr_process_add (gs
, rhs1
, rhs2
, speed
);
1590 slsr_process_neg (gs
, rhs1
, speed
);
1594 slsr_process_cast (gs
, rhs1
, speed
);
1598 slsr_process_copy (gs
, rhs1
, speed
);
1608 /* Dump a candidate for debug. */
1611 dump_candidate (slsr_cand_t c
)
1613 fprintf (dump_file
, "%3d [%d] ", c
->cand_num
,
1614 gimple_bb (c
->cand_stmt
)->index
);
1615 print_gimple_stmt (dump_file
, c
->cand_stmt
, 0, 0);
1619 fputs (" MULT : (", dump_file
);
1620 print_generic_expr (dump_file
, c
->base_expr
, 0);
1621 fputs (" + ", dump_file
);
1622 dump_double_int (dump_file
, c
->index
, false);
1623 fputs (") * ", dump_file
);
1624 print_generic_expr (dump_file
, c
->stride
, 0);
1625 fputs (" : ", dump_file
);
1628 fputs (" ADD : ", dump_file
);
1629 print_generic_expr (dump_file
, c
->base_expr
, 0);
1630 fputs (" + (", dump_file
);
1631 dump_double_int (dump_file
, c
->index
, false);
1632 fputs (" * ", dump_file
);
1633 print_generic_expr (dump_file
, c
->stride
, 0);
1634 fputs (") : ", dump_file
);
1637 fputs (" REF : ", dump_file
);
1638 print_generic_expr (dump_file
, c
->base_expr
, 0);
1639 fputs (" + (", dump_file
);
1640 print_generic_expr (dump_file
, c
->stride
, 0);
1641 fputs (") + ", dump_file
);
1642 dump_double_int (dump_file
, c
->index
, false);
1643 fputs (" : ", dump_file
);
1646 fputs (" PHI : ", dump_file
);
1647 print_generic_expr (dump_file
, c
->base_expr
, 0);
1648 fputs (" + (unknown * ", dump_file
);
1649 print_generic_expr (dump_file
, c
->stride
, 0);
1650 fputs (") : ", dump_file
);
1655 print_generic_expr (dump_file
, c
->cand_type
, 0);
1656 fprintf (dump_file
, "\n basis: %d dependent: %d sibling: %d\n",
1657 c
->basis
, c
->dependent
, c
->sibling
);
1658 fprintf (dump_file
, " next-interp: %d dead-savings: %d\n",
1659 c
->next_interp
, c
->dead_savings
);
1661 fprintf (dump_file
, " phi: %d\n", c
->def_phi
);
1662 fputs ("\n", dump_file
);
1665 /* Dump the candidate vector for debug. */
1668 dump_cand_vec (void)
1673 fprintf (dump_file
, "\nStrength reduction candidate vector:\n\n");
1675 FOR_EACH_VEC_ELT (cand_vec
, i
, c
)
1679 /* Callback used to dump the candidate chains hash table. */
1682 ssa_base_cand_dump_callback (cand_chain
**slot
, void *ignored ATTRIBUTE_UNUSED
)
1684 const_cand_chain_t chain
= *slot
;
1687 print_generic_expr (dump_file
, chain
->base_expr
, 0);
1688 fprintf (dump_file
, " -> %d", chain
->cand
->cand_num
);
1690 for (p
= chain
->next
; p
; p
= p
->next
)
1691 fprintf (dump_file
, " -> %d", p
->cand
->cand_num
);
1693 fputs ("\n", dump_file
);
1697 /* Dump the candidate chains. */
1700 dump_cand_chains (void)
1702 fprintf (dump_file
, "\nStrength reduction candidate chains:\n\n");
1703 base_cand_map
.traverse_noresize
<void *, ssa_base_cand_dump_callback
> (NULL
);
1704 fputs ("\n", dump_file
);
1707 /* Dump the increment vector for debug. */
1710 dump_incr_vec (void)
1712 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1716 fprintf (dump_file
, "\nIncrement vector:\n\n");
1718 for (i
= 0; i
< incr_vec_len
; i
++)
1720 fprintf (dump_file
, "%3d increment: ", i
);
1721 dump_double_int (dump_file
, incr_vec
[i
].incr
, false);
1722 fprintf (dump_file
, "\n count: %d", incr_vec
[i
].count
);
1723 fprintf (dump_file
, "\n cost: %d", incr_vec
[i
].cost
);
1724 fputs ("\n initializer: ", dump_file
);
1725 print_generic_expr (dump_file
, incr_vec
[i
].initializer
, 0);
1726 fputs ("\n\n", dump_file
);
1731 /* Replace *EXPR in candidate C with an equivalent strength-reduced
1735 replace_ref (tree
*expr
, slsr_cand_t c
)
1737 tree add_expr
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (c
->base_expr
),
1738 c
->base_expr
, c
->stride
);
1739 tree mem_ref
= fold_build2 (MEM_REF
, TREE_TYPE (*expr
), add_expr
,
1740 double_int_to_tree (c
->cand_type
, c
->index
));
1742 /* Gimplify the base addressing expression for the new MEM_REF tree. */
1743 gimple_stmt_iterator gsi
= gsi_for_stmt (c
->cand_stmt
);
1744 TREE_OPERAND (mem_ref
, 0)
1745 = force_gimple_operand_gsi (&gsi
, TREE_OPERAND (mem_ref
, 0),
1746 /*simple_p=*/true, NULL
,
1747 /*before=*/true, GSI_SAME_STMT
);
1748 copy_ref_info (mem_ref
, *expr
);
1750 update_stmt (c
->cand_stmt
);
1753 /* Replace CAND_REF candidate C, each sibling of candidate C, and each
1754 dependent of candidate C with an equivalent strength-reduced data
1758 replace_refs (slsr_cand_t c
)
1760 if (gimple_vdef (c
->cand_stmt
))
1762 tree
*lhs
= gimple_assign_lhs_ptr (c
->cand_stmt
);
1763 replace_ref (lhs
, c
);
1767 tree
*rhs
= gimple_assign_rhs1_ptr (c
->cand_stmt
);
1768 replace_ref (rhs
, c
);
1772 replace_refs (lookup_cand (c
->sibling
));
1775 replace_refs (lookup_cand (c
->dependent
));
1778 /* Return TRUE if candidate C is dependent upon a PHI. */
1781 phi_dependent_cand_p (slsr_cand_t c
)
1783 /* A candidate is not necessarily dependent upon a PHI just because
1784 it has a phi definition for its base name. It may have a basis
1785 that relies upon the same phi definition, in which case the PHI
1786 is irrelevant to this candidate. */
1789 && lookup_cand (c
->basis
)->def_phi
!= c
->def_phi
);
1792 /* Calculate the increment required for candidate C relative to
1796 cand_increment (slsr_cand_t c
)
1800 /* If the candidate doesn't have a basis, just return its own
1801 index. This is useful in record_increments to help us find
1802 an existing initializer. Also, if the candidate's basis is
1803 hidden by a phi, then its own index will be the increment
1804 from the newly introduced phi basis. */
1805 if (!c
->basis
|| phi_dependent_cand_p (c
))
1808 basis
= lookup_cand (c
->basis
);
1809 gcc_assert (operand_equal_p (c
->base_expr
, basis
->base_expr
, 0));
1810 return c
->index
- basis
->index
;
1813 /* Calculate the increment required for candidate C relative to
1814 its basis. If we aren't going to generate pointer arithmetic
1815 for this candidate, return the absolute value of that increment
1818 static inline double_int
1819 cand_abs_increment (slsr_cand_t c
)
1821 double_int increment
= cand_increment (c
);
1823 if (!address_arithmetic_p
&& increment
.is_negative ())
1824 increment
= -increment
;
1829 /* If *VAR is NULL or is not of a compatible type with TYPE, create a
1830 new temporary reg of type TYPE and store it in *VAR. */
1833 lazy_create_slsr_reg (tree
*var
, tree type
)
1835 if (!*var
|| !types_compatible_p (TREE_TYPE (*var
), type
))
1836 *var
= create_tmp_reg (type
, "slsr");
1839 /* Return TRUE iff candidate C has already been replaced under
1840 another interpretation. */
1843 cand_already_replaced (slsr_cand_t c
)
1845 return (gimple_bb (c
->cand_stmt
) == 0);
1848 /* Common logic used by replace_unconditional_candidate and
1849 replace_conditional_candidate. */
1852 replace_mult_candidate (slsr_cand_t c
, tree basis_name
, double_int bump
,
1855 tree target_type
= TREE_TYPE (gimple_assign_lhs (c
->cand_stmt
));
1856 enum tree_code cand_code
= gimple_assign_rhs_code (c
->cand_stmt
);
1858 /* It is highly unlikely, but possible, that the resulting
1859 bump doesn't fit in a HWI. Abandon the replacement
1860 in this case. This does not affect siblings or dependents
1861 of C. Restriction to signed HWI is conservative for unsigned
1862 types but allows for safe negation without twisted logic. */
1863 if (bump
.fits_shwi ()
1864 && bump
.to_shwi () != HOST_WIDE_INT_MIN
1865 /* It is not useful to replace casts, copies, or adds of
1866 an SSA name and a constant. */
1867 && cand_code
!= MODIFY_EXPR
1868 && cand_code
!= NOP_EXPR
1869 && cand_code
!= PLUS_EXPR
1870 && cand_code
!= POINTER_PLUS_EXPR
1871 && cand_code
!= MINUS_EXPR
)
1873 enum tree_code code
= PLUS_EXPR
;
1875 gimple stmt_to_print
= NULL
;
1877 /* If the basis name and the candidate's LHS have incompatible
1878 types, introduce a cast. */
1879 if (!useless_type_conversion_p (target_type
, TREE_TYPE (basis_name
)))
1880 basis_name
= introduce_cast_before_cand (c
, target_type
,
1882 if (bump
.is_negative ())
1888 bump_tree
= double_int_to_tree (target_type
, bump
);
1890 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1892 fputs ("Replacing: ", dump_file
);
1893 print_gimple_stmt (dump_file
, c
->cand_stmt
, 0, 0);
1896 if (bump
.is_zero ())
1898 tree lhs
= gimple_assign_lhs (c
->cand_stmt
);
1899 gimple copy_stmt
= gimple_build_assign (lhs
, basis_name
);
1900 gimple_stmt_iterator gsi
= gsi_for_stmt (c
->cand_stmt
);
1901 gimple_set_location (copy_stmt
, gimple_location (c
->cand_stmt
));
1902 gsi_replace (&gsi
, copy_stmt
, false);
1903 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1904 stmt_to_print
= copy_stmt
;
1909 if (cand_code
!= NEGATE_EXPR
) {
1910 rhs1
= gimple_assign_rhs1 (c
->cand_stmt
);
1911 rhs2
= gimple_assign_rhs2 (c
->cand_stmt
);
1913 if (cand_code
!= NEGATE_EXPR
1914 && ((operand_equal_p (rhs1
, basis_name
, 0)
1915 && operand_equal_p (rhs2
, bump_tree
, 0))
1916 || (operand_equal_p (rhs1
, bump_tree
, 0)
1917 && operand_equal_p (rhs2
, basis_name
, 0))))
1919 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1921 fputs ("(duplicate, not actually replacing)", dump_file
);
1922 stmt_to_print
= c
->cand_stmt
;
1927 gimple_stmt_iterator gsi
= gsi_for_stmt (c
->cand_stmt
);
1928 gimple_assign_set_rhs_with_ops (&gsi
, code
,
1929 basis_name
, bump_tree
);
1930 update_stmt (gsi_stmt (gsi
));
1931 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1932 stmt_to_print
= gsi_stmt (gsi
);
1936 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1938 fputs ("With: ", dump_file
);
1939 print_gimple_stmt (dump_file
, stmt_to_print
, 0, 0);
1940 fputs ("\n", dump_file
);
1945 /* Replace candidate C with an add or subtract. Note that we only
1946 operate on CAND_MULTs with known strides, so we will never generate
1947 a POINTER_PLUS_EXPR. Each candidate X = (B + i) * S is replaced by
1948 X = Y + ((i - i') * S), as described in the module commentary. The
1949 folded value ((i - i') * S) is referred to here as the "bump." */
1952 replace_unconditional_candidate (slsr_cand_t c
)
1955 double_int stride
, bump
;
1958 if (cand_already_replaced (c
))
1961 basis
= lookup_cand (c
->basis
);
1962 stride
= tree_to_double_int (c
->stride
);
1963 bump
= cand_increment (c
) * stride
;
1965 replace_mult_candidate (c
, gimple_assign_lhs (basis
->cand_stmt
), bump
, &var
);
1968 /* Return the index in the increment vector of the given INCREMENT. */
1970 static inline unsigned
1971 incr_vec_index (double_int increment
)
1975 for (i
= 0; i
< incr_vec_len
&& increment
!= incr_vec
[i
].incr
; i
++)
1978 gcc_assert (i
< incr_vec_len
);
1982 /* Create a new statement along edge E to add BASIS_NAME to the product
1983 of INCREMENT and the stride of candidate C. Create and return a new
1984 SSA name from *VAR to be used as the LHS of the new statement.
1985 KNOWN_STRIDE is true iff C's stride is a constant. */
1988 create_add_on_incoming_edge (slsr_cand_t c
, tree basis_name
,
1989 double_int increment
, edge e
, location_t loc
,
1992 basic_block insert_bb
;
1993 gimple_stmt_iterator gsi
;
1994 tree lhs
, basis_type
;
1997 /* If the add candidate along this incoming edge has the same
1998 index as C's hidden basis, the hidden basis represents this
2000 if (increment
.is_zero ())
2003 basis_type
= TREE_TYPE (basis_name
);
2004 lhs
= make_temp_ssa_name (basis_type
, NULL
, "slsr");
2009 enum tree_code code
= PLUS_EXPR
;
2010 double_int bump
= increment
* tree_to_double_int (c
->stride
);
2011 if (bump
.is_negative ())
2017 bump_tree
= double_int_to_tree (basis_type
, bump
);
2018 new_stmt
= gimple_build_assign_with_ops (code
, lhs
, basis_name
,
2024 bool negate_incr
= (!address_arithmetic_p
&& increment
.is_negative ());
2025 i
= incr_vec_index (negate_incr
? -increment
: increment
);
2027 if (incr_vec
[i
].initializer
)
2029 enum tree_code code
= negate_incr
? MINUS_EXPR
: PLUS_EXPR
;
2030 new_stmt
= gimple_build_assign_with_ops (code
, lhs
, basis_name
,
2031 incr_vec
[i
].initializer
);
2033 else if (increment
.is_one ())
2034 new_stmt
= gimple_build_assign_with_ops (PLUS_EXPR
, lhs
, basis_name
,
2036 else if (increment
.is_minus_one ())
2037 new_stmt
= gimple_build_assign_with_ops (MINUS_EXPR
, lhs
, basis_name
,
2043 insert_bb
= single_succ_p (e
->src
) ? e
->src
: split_edge (e
);
2044 gsi
= gsi_last_bb (insert_bb
);
2046 if (!gsi_end_p (gsi
) && is_ctrl_stmt (gsi_stmt (gsi
)))
2047 gsi_insert_before (&gsi
, new_stmt
, GSI_NEW_STMT
);
2049 gsi_insert_after (&gsi
, new_stmt
, GSI_NEW_STMT
);
2051 gimple_set_location (new_stmt
, loc
);
2053 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2055 fprintf (dump_file
, "Inserting in block %d: ", insert_bb
->index
);
2056 print_gimple_stmt (dump_file
, new_stmt
, 0, 0);
2062 /* Given a candidate C with BASIS_NAME being the LHS of C's basis which
2063 is hidden by the phi node FROM_PHI, create a new phi node in the same
2064 block as FROM_PHI. The new phi is suitable for use as a basis by C,
2065 with its phi arguments representing conditional adjustments to the
2066 hidden basis along conditional incoming paths. Those adjustments are
2067 made by creating add statements (and sometimes recursively creating
2068 phis) along those incoming paths. LOC is the location to attach to
2069 the introduced statements. KNOWN_STRIDE is true iff C's stride is a
2073 create_phi_basis (slsr_cand_t c
, gimple from_phi
, tree basis_name
,
2074 location_t loc
, bool known_stride
)
2080 slsr_cand_t basis
= lookup_cand (c
->basis
);
2081 int nargs
= gimple_phi_num_args (from_phi
);
2082 basic_block phi_bb
= gimple_bb (from_phi
);
2083 slsr_cand_t phi_cand
= base_cand_from_table (gimple_phi_result (from_phi
));
2084 phi_args
.create (nargs
);
2086 /* Process each argument of the existing phi that represents
2087 conditionally-executed add candidates. */
2088 for (i
= 0; i
< nargs
; i
++)
2090 edge e
= (*phi_bb
->preds
)[i
];
2091 tree arg
= gimple_phi_arg_def (from_phi
, i
);
2094 /* If the phi argument is the base name of the CAND_PHI, then
2095 this incoming arc should use the hidden basis. */
2096 if (operand_equal_p (arg
, phi_cand
->base_expr
, 0))
2097 if (basis
->index
.is_zero ())
2098 feeding_def
= gimple_assign_lhs (basis
->cand_stmt
);
2101 double_int incr
= c
->index
- basis
->index
;
2102 feeding_def
= create_add_on_incoming_edge (c
, basis_name
, incr
,
2103 e
, loc
, known_stride
);
2107 gimple arg_def
= SSA_NAME_DEF_STMT (arg
);
2109 /* If there is another phi along this incoming edge, we must
2110 process it in the same fashion to ensure that all basis
2111 adjustments are made along its incoming edges. */
2112 if (gimple_code (arg_def
) == GIMPLE_PHI
)
2113 feeding_def
= create_phi_basis (c
, arg_def
, basis_name
,
2117 slsr_cand_t arg_cand
= base_cand_from_table (arg
);
2118 double_int diff
= arg_cand
->index
- basis
->index
;
2119 feeding_def
= create_add_on_incoming_edge (c
, basis_name
, diff
,
2120 e
, loc
, known_stride
);
2124 /* Because of recursion, we need to save the arguments in a vector
2125 so we can create the PHI statement all at once. Otherwise the
2126 storage for the half-created PHI can be reclaimed. */
2127 phi_args
.safe_push (feeding_def
);
2130 /* Create the new phi basis. */
2131 name
= make_temp_ssa_name (TREE_TYPE (basis_name
), NULL
, "slsr");
2132 phi
= create_phi_node (name
, phi_bb
);
2133 SSA_NAME_DEF_STMT (name
) = phi
;
2135 FOR_EACH_VEC_ELT (phi_args
, i
, phi_arg
)
2137 edge e
= (*phi_bb
->preds
)[i
];
2138 add_phi_arg (phi
, phi_arg
, e
, loc
);
2143 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2145 fputs ("Introducing new phi basis: ", dump_file
);
2146 print_gimple_stmt (dump_file
, phi
, 0, 0);
2152 /* Given a candidate C whose basis is hidden by at least one intervening
2153 phi, introduce a matching number of new phis to represent its basis
2154 adjusted by conditional increments along possible incoming paths. Then
2155 replace C as though it were an unconditional candidate, using the new
2159 replace_conditional_candidate (slsr_cand_t c
)
2161 tree basis_name
, name
, var
= NULL
;
2164 double_int stride
, bump
;
2166 /* Look up the LHS SSA name from C's basis. This will be the
2167 RHS1 of the adds we will introduce to create new phi arguments. */
2168 basis
= lookup_cand (c
->basis
);
2169 basis_name
= gimple_assign_lhs (basis
->cand_stmt
);
2171 /* Create a new phi statement which will represent C's true basis
2172 after the transformation is complete. */
2173 loc
= gimple_location (c
->cand_stmt
);
2174 name
= create_phi_basis (c
, lookup_cand (c
->def_phi
)->cand_stmt
,
2175 basis_name
, loc
, KNOWN_STRIDE
);
2176 /* Replace C with an add of the new basis phi and a constant. */
2177 stride
= tree_to_double_int (c
->stride
);
2178 bump
= c
->index
* stride
;
2180 replace_mult_candidate (c
, name
, bump
, &var
);
2183 /* Compute the expected costs of inserting basis adjustments for
2184 candidate C with phi-definition PHI. The cost of inserting
2185 one adjustment is given by ONE_ADD_COST. If PHI has arguments
2186 which are themselves phi results, recursively calculate costs
2187 for those phis as well. */
2190 phi_add_costs (gimple phi
, slsr_cand_t c
, int one_add_cost
)
2194 slsr_cand_t phi_cand
= base_cand_from_table (gimple_phi_result (phi
));
2196 for (i
= 0; i
< gimple_phi_num_args (phi
); i
++)
2198 tree arg
= gimple_phi_arg_def (phi
, i
);
2200 if (arg
!= phi_cand
->base_expr
)
2202 gimple arg_def
= SSA_NAME_DEF_STMT (arg
);
2204 if (gimple_code (arg_def
) == GIMPLE_PHI
)
2205 cost
+= phi_add_costs (arg_def
, c
, one_add_cost
);
2208 slsr_cand_t arg_cand
= base_cand_from_table (arg
);
2210 if (arg_cand
->index
!= c
->index
)
2211 cost
+= one_add_cost
;
2219 /* For candidate C, each sibling of candidate C, and each dependent of
2220 candidate C, determine whether the candidate is dependent upon a
2221 phi that hides its basis. If not, replace the candidate unconditionally.
2222 Otherwise, determine whether the cost of introducing compensation code
2223 for the candidate is offset by the gains from strength reduction. If
2224 so, replace the candidate and introduce the compensation code. */
2227 replace_uncond_cands_and_profitable_phis (slsr_cand_t c
)
2229 if (phi_dependent_cand_p (c
))
2231 if (c
->kind
== CAND_MULT
)
2233 /* A candidate dependent upon a phi will replace a multiply by
2234 a constant with an add, and will insert at most one add for
2235 each phi argument. Add these costs with the potential dead-code
2236 savings to determine profitability. */
2237 bool speed
= optimize_bb_for_speed_p (gimple_bb (c
->cand_stmt
));
2238 int mult_savings
= stmt_cost (c
->cand_stmt
, speed
);
2239 gimple phi
= lookup_cand (c
->def_phi
)->cand_stmt
;
2240 tree phi_result
= gimple_phi_result (phi
);
2241 int one_add_cost
= add_cost (speed
,
2242 TYPE_MODE (TREE_TYPE (phi_result
)));
2243 int add_costs
= one_add_cost
+ phi_add_costs (phi
, c
, one_add_cost
);
2244 int cost
= add_costs
- mult_savings
- c
->dead_savings
;
2246 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2248 fprintf (dump_file
, " Conditional candidate %d:\n", c
->cand_num
);
2249 fprintf (dump_file
, " add_costs = %d\n", add_costs
);
2250 fprintf (dump_file
, " mult_savings = %d\n", mult_savings
);
2251 fprintf (dump_file
, " dead_savings = %d\n", c
->dead_savings
);
2252 fprintf (dump_file
, " cost = %d\n", cost
);
2253 if (cost
<= COST_NEUTRAL
)
2254 fputs (" Replacing...\n", dump_file
);
2256 fputs (" Not replaced.\n", dump_file
);
2259 if (cost
<= COST_NEUTRAL
)
2260 replace_conditional_candidate (c
);
2264 replace_unconditional_candidate (c
);
2267 replace_uncond_cands_and_profitable_phis (lookup_cand (c
->sibling
));
2270 replace_uncond_cands_and_profitable_phis (lookup_cand (c
->dependent
));
2273 /* Count the number of candidates in the tree rooted at C that have
2274 not already been replaced under other interpretations. */
2277 count_candidates (slsr_cand_t c
)
2279 unsigned count
= cand_already_replaced (c
) ? 0 : 1;
2282 count
+= count_candidates (lookup_cand (c
->sibling
));
2285 count
+= count_candidates (lookup_cand (c
->dependent
));
2290 /* Increase the count of INCREMENT by one in the increment vector.
2291 INCREMENT is associated with candidate C. If INCREMENT is to be
2292 conditionally executed as part of a conditional candidate replacement,
2293 IS_PHI_ADJUST is true, otherwise false. If an initializer
2294 T_0 = stride * I is provided by a candidate that dominates all
2295 candidates with the same increment, also record T_0 for subsequent use. */
2298 record_increment (slsr_cand_t c
, double_int increment
, bool is_phi_adjust
)
2303 /* Treat increments that differ only in sign as identical so as to
2304 share initializers, unless we are generating pointer arithmetic. */
2305 if (!address_arithmetic_p
&& increment
.is_negative ())
2306 increment
= -increment
;
2308 for (i
= 0; i
< incr_vec_len
; i
++)
2310 if (incr_vec
[i
].incr
== increment
)
2312 incr_vec
[i
].count
++;
2315 /* If we previously recorded an initializer that doesn't
2316 dominate this candidate, it's not going to be useful to
2318 if (incr_vec
[i
].initializer
2319 && !dominated_by_p (CDI_DOMINATORS
,
2320 gimple_bb (c
->cand_stmt
),
2321 incr_vec
[i
].init_bb
))
2323 incr_vec
[i
].initializer
= NULL_TREE
;
2324 incr_vec
[i
].init_bb
= NULL
;
2333 /* The first time we see an increment, create the entry for it.
2334 If this is the root candidate which doesn't have a basis, set
2335 the count to zero. We're only processing it so it can possibly
2336 provide an initializer for other candidates. */
2337 incr_vec
[incr_vec_len
].incr
= increment
;
2338 incr_vec
[incr_vec_len
].count
= c
->basis
|| is_phi_adjust
? 1 : 0;
2339 incr_vec
[incr_vec_len
].cost
= COST_INFINITE
;
2341 /* Optimistically record the first occurrence of this increment
2342 as providing an initializer (if it does); we will revise this
2343 opinion later if it doesn't dominate all other occurrences.
2344 Exception: increments of -1, 0, 1 never need initializers;
2345 and phi adjustments don't ever provide initializers. */
2346 if (c
->kind
== CAND_ADD
2348 && c
->index
== increment
2349 && (increment
.sgt (double_int_one
)
2350 || increment
.slt (double_int_minus_one
))
2351 && (gimple_assign_rhs_code (c
->cand_stmt
) == PLUS_EXPR
2352 || gimple_assign_rhs_code (c
->cand_stmt
) == POINTER_PLUS_EXPR
))
2354 tree t0
= NULL_TREE
;
2355 tree rhs1
= gimple_assign_rhs1 (c
->cand_stmt
);
2356 tree rhs2
= gimple_assign_rhs2 (c
->cand_stmt
);
2357 if (operand_equal_p (rhs1
, c
->base_expr
, 0))
2359 else if (operand_equal_p (rhs2
, c
->base_expr
, 0))
2362 && SSA_NAME_DEF_STMT (t0
)
2363 && gimple_bb (SSA_NAME_DEF_STMT (t0
)))
2365 incr_vec
[incr_vec_len
].initializer
= t0
;
2366 incr_vec
[incr_vec_len
++].init_bb
2367 = gimple_bb (SSA_NAME_DEF_STMT (t0
));
2371 incr_vec
[incr_vec_len
].initializer
= NULL_TREE
;
2372 incr_vec
[incr_vec_len
++].init_bb
= NULL
;
2377 incr_vec
[incr_vec_len
].initializer
= NULL_TREE
;
2378 incr_vec
[incr_vec_len
++].init_bb
= NULL
;
2383 /* Given phi statement PHI that hides a candidate from its BASIS, find
2384 the increments along each incoming arc (recursively handling additional
2385 phis that may be present) and record them. These increments are the
2386 difference in index between the index-adjusting statements and the
2387 index of the basis. */
2390 record_phi_increments (slsr_cand_t basis
, gimple phi
)
2393 slsr_cand_t phi_cand
= base_cand_from_table (gimple_phi_result (phi
));
2395 for (i
= 0; i
< gimple_phi_num_args (phi
); i
++)
2397 tree arg
= gimple_phi_arg_def (phi
, i
);
2399 if (!operand_equal_p (arg
, phi_cand
->base_expr
, 0))
2401 gimple arg_def
= SSA_NAME_DEF_STMT (arg
);
2403 if (gimple_code (arg_def
) == GIMPLE_PHI
)
2404 record_phi_increments (basis
, arg_def
);
2407 slsr_cand_t arg_cand
= base_cand_from_table (arg
);
2408 double_int diff
= arg_cand
->index
- basis
->index
;
2409 record_increment (arg_cand
, diff
, PHI_ADJUST
);
2415 /* Determine how many times each unique increment occurs in the set
2416 of candidates rooted at C's parent, recording the data in the
2417 increment vector. For each unique increment I, if an initializer
2418 T_0 = stride * I is provided by a candidate that dominates all
2419 candidates with the same increment, also record T_0 for subsequent
2423 record_increments (slsr_cand_t c
)
2425 if (!cand_already_replaced (c
))
2427 if (!phi_dependent_cand_p (c
))
2428 record_increment (c
, cand_increment (c
), NOT_PHI_ADJUST
);
2431 /* A candidate with a basis hidden by a phi will have one
2432 increment for its relationship to the index represented by
2433 the phi, and potentially additional increments along each
2434 incoming edge. For the root of the dependency tree (which
2435 has no basis), process just the initial index in case it has
2436 an initializer that can be used by subsequent candidates. */
2437 record_increment (c
, c
->index
, NOT_PHI_ADJUST
);
2440 record_phi_increments (lookup_cand (c
->basis
),
2441 lookup_cand (c
->def_phi
)->cand_stmt
);
2446 record_increments (lookup_cand (c
->sibling
));
2449 record_increments (lookup_cand (c
->dependent
));
2452 /* Add up and return the costs of introducing add statements that
2453 require the increment INCR on behalf of candidate C and phi
2454 statement PHI. Accumulate into *SAVINGS the potential savings
2455 from removing existing statements that feed PHI and have no other
2459 phi_incr_cost (slsr_cand_t c
, double_int incr
, gimple phi
, int *savings
)
2463 slsr_cand_t basis
= lookup_cand (c
->basis
);
2464 slsr_cand_t phi_cand
= base_cand_from_table (gimple_phi_result (phi
));
2466 for (i
= 0; i
< gimple_phi_num_args (phi
); i
++)
2468 tree arg
= gimple_phi_arg_def (phi
, i
);
2470 if (!operand_equal_p (arg
, phi_cand
->base_expr
, 0))
2472 gimple arg_def
= SSA_NAME_DEF_STMT (arg
);
2474 if (gimple_code (arg_def
) == GIMPLE_PHI
)
2476 int feeding_savings
= 0;
2477 cost
+= phi_incr_cost (c
, incr
, arg_def
, &feeding_savings
);
2478 if (has_single_use (gimple_phi_result (arg_def
)))
2479 *savings
+= feeding_savings
;
2483 slsr_cand_t arg_cand
= base_cand_from_table (arg
);
2484 double_int diff
= arg_cand
->index
- basis
->index
;
2488 tree basis_lhs
= gimple_assign_lhs (basis
->cand_stmt
);
2489 tree lhs
= gimple_assign_lhs (arg_cand
->cand_stmt
);
2490 cost
+= add_cost (true, TYPE_MODE (TREE_TYPE (basis_lhs
)));
2491 if (has_single_use (lhs
))
2492 *savings
+= stmt_cost (arg_cand
->cand_stmt
, true);
2501 /* Return the first candidate in the tree rooted at C that has not
2502 already been replaced, favoring siblings over dependents. */
2505 unreplaced_cand_in_tree (slsr_cand_t c
)
2507 if (!cand_already_replaced (c
))
2512 slsr_cand_t sib
= unreplaced_cand_in_tree (lookup_cand (c
->sibling
));
2519 slsr_cand_t dep
= unreplaced_cand_in_tree (lookup_cand (c
->dependent
));
2527 /* Return TRUE if the candidates in the tree rooted at C should be
2528 optimized for speed, else FALSE. We estimate this based on the block
2529 containing the most dominant candidate in the tree that has not yet
2533 optimize_cands_for_speed_p (slsr_cand_t c
)
2535 slsr_cand_t c2
= unreplaced_cand_in_tree (c
);
2537 return optimize_bb_for_speed_p (gimple_bb (c2
->cand_stmt
));
2540 /* Add COST_IN to the lowest cost of any dependent path starting at
2541 candidate C or any of its siblings, counting only candidates along
2542 such paths with increment INCR. Assume that replacing a candidate
2543 reduces cost by REPL_SAVINGS. Also account for savings from any
2544 statements that would go dead. If COUNT_PHIS is true, include
2545 costs of introducing feeding statements for conditional candidates. */
2548 lowest_cost_path (int cost_in
, int repl_savings
, slsr_cand_t c
,
2549 double_int incr
, bool count_phis
)
2551 int local_cost
, sib_cost
, savings
= 0;
2552 double_int cand_incr
= cand_abs_increment (c
);
2554 if (cand_already_replaced (c
))
2555 local_cost
= cost_in
;
2556 else if (incr
== cand_incr
)
2557 local_cost
= cost_in
- repl_savings
- c
->dead_savings
;
2559 local_cost
= cost_in
- c
->dead_savings
;
2562 && phi_dependent_cand_p (c
)
2563 && !cand_already_replaced (c
))
2565 gimple phi
= lookup_cand (c
->def_phi
)->cand_stmt
;
2566 local_cost
+= phi_incr_cost (c
, incr
, phi
, &savings
);
2568 if (has_single_use (gimple_phi_result (phi
)))
2569 local_cost
-= savings
;
2573 local_cost
= lowest_cost_path (local_cost
, repl_savings
,
2574 lookup_cand (c
->dependent
), incr
,
2579 sib_cost
= lowest_cost_path (cost_in
, repl_savings
,
2580 lookup_cand (c
->sibling
), incr
,
2582 local_cost
= MIN (local_cost
, sib_cost
);
2588 /* Compute the total savings that would accrue from all replacements
2589 in the candidate tree rooted at C, counting only candidates with
2590 increment INCR. Assume that replacing a candidate reduces cost
2591 by REPL_SAVINGS. Also account for savings from statements that
2595 total_savings (int repl_savings
, slsr_cand_t c
, double_int incr
,
2599 double_int cand_incr
= cand_abs_increment (c
);
2601 if (incr
== cand_incr
&& !cand_already_replaced (c
))
2602 savings
+= repl_savings
+ c
->dead_savings
;
2605 && phi_dependent_cand_p (c
)
2606 && !cand_already_replaced (c
))
2608 int phi_savings
= 0;
2609 gimple phi
= lookup_cand (c
->def_phi
)->cand_stmt
;
2610 savings
-= phi_incr_cost (c
, incr
, phi
, &phi_savings
);
2612 if (has_single_use (gimple_phi_result (phi
)))
2613 savings
+= phi_savings
;
2617 savings
+= total_savings (repl_savings
, lookup_cand (c
->dependent
), incr
,
2621 savings
+= total_savings (repl_savings
, lookup_cand (c
->sibling
), incr
,
2627 /* Use target-specific costs to determine and record which increments
2628 in the current candidate tree are profitable to replace, assuming
2629 MODE and SPEED. FIRST_DEP is the first dependent of the root of
2632 One slight limitation here is that we don't account for the possible
2633 introduction of casts in some cases. See replace_one_candidate for
2634 the cases where these are introduced. This should probably be cleaned
2638 analyze_increments (slsr_cand_t first_dep
, enum machine_mode mode
, bool speed
)
2642 for (i
= 0; i
< incr_vec_len
; i
++)
2644 HOST_WIDE_INT incr
= incr_vec
[i
].incr
.to_shwi ();
2646 /* If somehow this increment is bigger than a HWI, we won't
2647 be optimizing candidates that use it. And if the increment
2648 has a count of zero, nothing will be done with it. */
2649 if (!incr_vec
[i
].incr
.fits_shwi () || !incr_vec
[i
].count
)
2650 incr_vec
[i
].cost
= COST_INFINITE
;
2652 /* Increments of 0, 1, and -1 are always profitable to replace,
2653 because they always replace a multiply or add with an add or
2654 copy, and may cause one or more existing instructions to go
2655 dead. Exception: -1 can't be assumed to be profitable for
2656 pointer addition. */
2660 && (gimple_assign_rhs_code (first_dep
->cand_stmt
)
2661 != POINTER_PLUS_EXPR
)))
2662 incr_vec
[i
].cost
= COST_NEUTRAL
;
2664 /* FORNOW: If we need to add an initializer, give up if a cast from
2665 the candidate's type to its stride's type can lose precision.
2666 This could eventually be handled better by expressly retaining the
2667 result of a cast to a wider type in the stride. Example:
2672 _4 = x + _3; ADD: x + (10 * _1) : int
2674 _6 = x + _3; ADD: x + (15 * _1) : int
2676 Right now replacing _6 would cause insertion of an initializer
2677 of the form "short int T = _1 * 5;" followed by a cast to
2678 int, which could overflow incorrectly. Had we recorded _2 or
2679 (int)_1 as the stride, this wouldn't happen. However, doing
2680 this breaks other opportunities, so this will require some
2682 else if (!incr_vec
[i
].initializer
2683 && TREE_CODE (first_dep
->stride
) != INTEGER_CST
2684 && !legal_cast_p_1 (first_dep
->stride
,
2685 gimple_assign_lhs (first_dep
->cand_stmt
)))
2687 incr_vec
[i
].cost
= COST_INFINITE
;
2689 /* If we need to add an initializer, make sure we don't introduce
2690 a multiply by a pointer type, which can happen in certain cast
2691 scenarios. FIXME: When cleaning up these cast issues, we can
2692 afford to introduce the multiply provided we cast out to an
2693 unsigned int of appropriate size. */
2694 else if (!incr_vec
[i
].initializer
2695 && TREE_CODE (first_dep
->stride
) != INTEGER_CST
2696 && POINTER_TYPE_P (TREE_TYPE (first_dep
->stride
)))
2698 incr_vec
[i
].cost
= COST_INFINITE
;
2700 /* For any other increment, if this is a multiply candidate, we
2701 must introduce a temporary T and initialize it with
2702 T_0 = stride * increment. When optimizing for speed, walk the
2703 candidate tree to calculate the best cost reduction along any
2704 path; if it offsets the fixed cost of inserting the initializer,
2705 replacing the increment is profitable. When optimizing for
2706 size, instead calculate the total cost reduction from replacing
2707 all candidates with this increment. */
2708 else if (first_dep
->kind
== CAND_MULT
)
2710 int cost
= mult_by_coeff_cost (incr
, mode
, speed
);
2711 int repl_savings
= mul_cost (speed
, mode
) - add_cost (speed
, mode
);
2713 cost
= lowest_cost_path (cost
, repl_savings
, first_dep
,
2714 incr_vec
[i
].incr
, COUNT_PHIS
);
2716 cost
-= total_savings (repl_savings
, first_dep
, incr_vec
[i
].incr
,
2719 incr_vec
[i
].cost
= cost
;
2722 /* If this is an add candidate, the initializer may already
2723 exist, so only calculate the cost of the initializer if it
2724 doesn't. We are replacing one add with another here, so the
2725 known replacement savings is zero. We will account for removal
2726 of dead instructions in lowest_cost_path or total_savings. */
2730 if (!incr_vec
[i
].initializer
)
2731 cost
= mult_by_coeff_cost (incr
, mode
, speed
);
2734 cost
= lowest_cost_path (cost
, 0, first_dep
, incr_vec
[i
].incr
,
2737 cost
-= total_savings (0, first_dep
, incr_vec
[i
].incr
,
2740 incr_vec
[i
].cost
= cost
;
2745 /* Return the nearest common dominator of BB1 and BB2. If the blocks
2746 are identical, return the earlier of C1 and C2 in *WHERE. Otherwise,
2747 if the NCD matches BB1, return C1 in *WHERE; if the NCD matches BB2,
2748 return C2 in *WHERE; and if the NCD matches neither, return NULL in
2749 *WHERE. Note: It is possible for one of C1 and C2 to be NULL. */
2752 ncd_for_two_cands (basic_block bb1
, basic_block bb2
,
2753 slsr_cand_t c1
, slsr_cand_t c2
, slsr_cand_t
*where
)
2769 ncd
= nearest_common_dominator (CDI_DOMINATORS
, bb1
, bb2
);
2771 /* If both candidates are in the same block, the earlier
2773 if (bb1
== ncd
&& bb2
== ncd
)
2775 if (!c1
|| (c2
&& c2
->cand_num
< c1
->cand_num
))
2781 /* Otherwise, if one of them produced a candidate in the
2782 dominator, that one wins. */
2783 else if (bb1
== ncd
)
2786 else if (bb2
== ncd
)
2789 /* If neither matches the dominator, neither wins. */
2796 /* Consider all candidates that feed PHI. Find the nearest common
2797 dominator of those candidates requiring the given increment INCR.
2798 Further find and return the nearest common dominator of this result
2799 with block NCD. If the returned block contains one or more of the
2800 candidates, return the earliest candidate in the block in *WHERE. */
2803 ncd_with_phi (slsr_cand_t c
, double_int incr
, gimple phi
,
2804 basic_block ncd
, slsr_cand_t
*where
)
2807 slsr_cand_t basis
= lookup_cand (c
->basis
);
2808 slsr_cand_t phi_cand
= base_cand_from_table (gimple_phi_result (phi
));
2810 for (i
= 0; i
< gimple_phi_num_args (phi
); i
++)
2812 tree arg
= gimple_phi_arg_def (phi
, i
);
2814 if (!operand_equal_p (arg
, phi_cand
->base_expr
, 0))
2816 gimple arg_def
= SSA_NAME_DEF_STMT (arg
);
2818 if (gimple_code (arg_def
) == GIMPLE_PHI
)
2819 ncd
= ncd_with_phi (c
, incr
, arg_def
, ncd
, where
);
2822 slsr_cand_t arg_cand
= base_cand_from_table (arg
);
2823 double_int diff
= arg_cand
->index
- basis
->index
;
2825 if ((incr
== diff
) || (!address_arithmetic_p
&& incr
== -diff
))
2826 ncd
= ncd_for_two_cands (ncd
, gimple_bb (arg_cand
->cand_stmt
),
2827 *where
, arg_cand
, where
);
2835 /* Consider the candidate C together with any candidates that feed
2836 C's phi dependence (if any). Find and return the nearest common
2837 dominator of those candidates requiring the given increment INCR.
2838 If the returned block contains one or more of the candidates,
2839 return the earliest candidate in the block in *WHERE. */
2842 ncd_of_cand_and_phis (slsr_cand_t c
, double_int incr
, slsr_cand_t
*where
)
2844 basic_block ncd
= NULL
;
2846 if (cand_abs_increment (c
) == incr
)
2848 ncd
= gimple_bb (c
->cand_stmt
);
2852 if (phi_dependent_cand_p (c
))
2853 ncd
= ncd_with_phi (c
, incr
, lookup_cand (c
->def_phi
)->cand_stmt
,
2859 /* Consider all candidates in the tree rooted at C for which INCR
2860 represents the required increment of C relative to its basis.
2861 Find and return the basic block that most nearly dominates all
2862 such candidates. If the returned block contains one or more of
2863 the candidates, return the earliest candidate in the block in
2867 nearest_common_dominator_for_cands (slsr_cand_t c
, double_int incr
,
2870 basic_block sib_ncd
= NULL
, dep_ncd
= NULL
, this_ncd
= NULL
, ncd
;
2871 slsr_cand_t sib_where
= NULL
, dep_where
= NULL
, this_where
= NULL
, new_where
;
2873 /* First find the NCD of all siblings and dependents. */
2875 sib_ncd
= nearest_common_dominator_for_cands (lookup_cand (c
->sibling
),
2878 dep_ncd
= nearest_common_dominator_for_cands (lookup_cand (c
->dependent
),
2880 if (!sib_ncd
&& !dep_ncd
)
2885 else if (sib_ncd
&& !dep_ncd
)
2887 new_where
= sib_where
;
2890 else if (dep_ncd
&& !sib_ncd
)
2892 new_where
= dep_where
;
2896 ncd
= ncd_for_two_cands (sib_ncd
, dep_ncd
, sib_where
,
2897 dep_where
, &new_where
);
2899 /* If the candidate's increment doesn't match the one we're interested
2900 in (and nor do any increments for feeding defs of a phi-dependence),
2901 then the result depends only on siblings and dependents. */
2902 this_ncd
= ncd_of_cand_and_phis (c
, incr
, &this_where
);
2904 if (!this_ncd
|| cand_already_replaced (c
))
2910 /* Otherwise, compare this candidate with the result from all siblings
2912 ncd
= ncd_for_two_cands (ncd
, this_ncd
, new_where
, this_where
, where
);
2917 /* Return TRUE if the increment indexed by INDEX is profitable to replace. */
2920 profitable_increment_p (unsigned index
)
2922 return (incr_vec
[index
].cost
<= COST_NEUTRAL
);
2925 /* For each profitable increment in the increment vector not equal to
2926 0 or 1 (or -1, for non-pointer arithmetic), find the nearest common
2927 dominator of all statements in the candidate chain rooted at C
2928 that require that increment, and insert an initializer
2929 T_0 = stride * increment at that location. Record T_0 with the
2930 increment record. */
2933 insert_initializers (slsr_cand_t c
)
2936 tree new_var
= NULL_TREE
;
2938 for (i
= 0; i
< incr_vec_len
; i
++)
2941 slsr_cand_t where
= NULL
;
2943 tree stride_type
, new_name
, incr_tree
;
2944 double_int incr
= incr_vec
[i
].incr
;
2946 if (!profitable_increment_p (i
)
2948 || (incr
.is_minus_one ()
2949 && gimple_assign_rhs_code (c
->cand_stmt
) != POINTER_PLUS_EXPR
)
2953 /* We may have already identified an existing initializer that
2955 if (incr_vec
[i
].initializer
)
2957 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2959 fputs ("Using existing initializer: ", dump_file
);
2960 print_gimple_stmt (dump_file
,
2961 SSA_NAME_DEF_STMT (incr_vec
[i
].initializer
),
2967 /* Find the block that most closely dominates all candidates
2968 with this increment. If there is at least one candidate in
2969 that block, the earliest one will be returned in WHERE. */
2970 bb
= nearest_common_dominator_for_cands (c
, incr
, &where
);
2972 /* Create a new SSA name to hold the initializer's value. */
2973 stride_type
= TREE_TYPE (c
->stride
);
2974 lazy_create_slsr_reg (&new_var
, stride_type
);
2975 new_name
= make_ssa_name (new_var
, NULL
);
2976 incr_vec
[i
].initializer
= new_name
;
2978 /* Create the initializer and insert it in the latest possible
2979 dominating position. */
2980 incr_tree
= double_int_to_tree (stride_type
, incr
);
2981 init_stmt
= gimple_build_assign_with_ops (MULT_EXPR
, new_name
,
2982 c
->stride
, incr_tree
);
2985 gimple_stmt_iterator gsi
= gsi_for_stmt (where
->cand_stmt
);
2986 gsi_insert_before (&gsi
, init_stmt
, GSI_SAME_STMT
);
2987 gimple_set_location (init_stmt
, gimple_location (where
->cand_stmt
));
2991 gimple_stmt_iterator gsi
= gsi_last_bb (bb
);
2992 gimple basis_stmt
= lookup_cand (c
->basis
)->cand_stmt
;
2994 if (!gsi_end_p (gsi
) && is_ctrl_stmt (gsi_stmt (gsi
)))
2995 gsi_insert_before (&gsi
, init_stmt
, GSI_SAME_STMT
);
2997 gsi_insert_after (&gsi
, init_stmt
, GSI_SAME_STMT
);
2999 gimple_set_location (init_stmt
, gimple_location (basis_stmt
));
3002 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3004 fputs ("Inserting initializer: ", dump_file
);
3005 print_gimple_stmt (dump_file
, init_stmt
, 0, 0);
3010 /* Return TRUE iff all required increments for candidates feeding PHI
3011 are profitable to replace on behalf of candidate C. */
3014 all_phi_incrs_profitable (slsr_cand_t c
, gimple phi
)
3017 slsr_cand_t basis
= lookup_cand (c
->basis
);
3018 slsr_cand_t phi_cand
= base_cand_from_table (gimple_phi_result (phi
));
3020 for (i
= 0; i
< gimple_phi_num_args (phi
); i
++)
3022 tree arg
= gimple_phi_arg_def (phi
, i
);
3024 if (!operand_equal_p (arg
, phi_cand
->base_expr
, 0))
3026 gimple arg_def
= SSA_NAME_DEF_STMT (arg
);
3028 if (gimple_code (arg_def
) == GIMPLE_PHI
)
3030 if (!all_phi_incrs_profitable (c
, arg_def
))
3036 slsr_cand_t arg_cand
= base_cand_from_table (arg
);
3037 double_int increment
= arg_cand
->index
- basis
->index
;
3039 if (!address_arithmetic_p
&& increment
.is_negative ())
3040 increment
= -increment
;
3042 j
= incr_vec_index (increment
);
3044 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3046 fprintf (dump_file
, " Conditional candidate %d, phi: ",
3048 print_gimple_stmt (dump_file
, phi
, 0, 0);
3049 fputs (" increment: ", dump_file
);
3050 dump_double_int (dump_file
, increment
, false);
3051 fprintf (dump_file
, "\n cost: %d\n", incr_vec
[j
].cost
);
3052 if (profitable_increment_p (j
))
3053 fputs (" Replacing...\n", dump_file
);
3055 fputs (" Not replaced.\n", dump_file
);
3058 if (!profitable_increment_p (j
))
3067 /* Create a NOP_EXPR that copies FROM_EXPR into a new SSA name of
3068 type TO_TYPE, and insert it in front of the statement represented
3069 by candidate C. Use *NEW_VAR to create the new SSA name. Return
3070 the new SSA name. */
3073 introduce_cast_before_cand (slsr_cand_t c
, tree to_type
,
3074 tree from_expr
, tree
*new_var
)
3078 gimple_stmt_iterator gsi
= gsi_for_stmt (c
->cand_stmt
);
3080 lazy_create_slsr_reg (new_var
, to_type
);
3081 cast_lhs
= make_ssa_name (*new_var
, NULL
);
3082 cast_stmt
= gimple_build_assign_with_ops (NOP_EXPR
, cast_lhs
,
3083 from_expr
, NULL_TREE
);
3084 gimple_set_location (cast_stmt
, gimple_location (c
->cand_stmt
));
3085 gsi_insert_before (&gsi
, cast_stmt
, GSI_SAME_STMT
);
3087 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3089 fputs (" Inserting: ", dump_file
);
3090 print_gimple_stmt (dump_file
, cast_stmt
, 0, 0);
3096 /* Replace the RHS of the statement represented by candidate C with
3097 NEW_CODE, NEW_RHS1, and NEW_RHS2, provided that to do so doesn't
3098 leave C unchanged or just interchange its operands. The original
3099 operation and operands are in OLD_CODE, OLD_RHS1, and OLD_RHS2.
3100 If the replacement was made and we are doing a details dump,
3101 return the revised statement, else NULL. */
3104 replace_rhs_if_not_dup (enum tree_code new_code
, tree new_rhs1
, tree new_rhs2
,
3105 enum tree_code old_code
, tree old_rhs1
, tree old_rhs2
,
3108 if (new_code
!= old_code
3109 || ((!operand_equal_p (new_rhs1
, old_rhs1
, 0)
3110 || !operand_equal_p (new_rhs2
, old_rhs2
, 0))
3111 && (!operand_equal_p (new_rhs1
, old_rhs2
, 0)
3112 || !operand_equal_p (new_rhs2
, old_rhs1
, 0))))
3114 gimple_stmt_iterator gsi
= gsi_for_stmt (c
->cand_stmt
);
3115 gimple_assign_set_rhs_with_ops (&gsi
, new_code
, new_rhs1
, new_rhs2
);
3116 update_stmt (gsi_stmt (gsi
));
3118 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3119 return gsi_stmt (gsi
);
3122 else if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3123 fputs (" (duplicate, not actually replacing)\n", dump_file
);
3128 /* Strength-reduce the statement represented by candidate C by replacing
3129 it with an equivalent addition or subtraction. I is the index into
3130 the increment vector identifying C's increment. NEW_VAR is used to
3131 create a new SSA name if a cast needs to be introduced. BASIS_NAME
3132 is the rhs1 to use in creating the add/subtract. */
3135 replace_one_candidate (slsr_cand_t c
, unsigned i
, tree
*new_var
,
3138 gimple stmt_to_print
= NULL
;
3139 tree orig_rhs1
, orig_rhs2
;
3141 enum tree_code orig_code
, repl_code
;
3142 double_int cand_incr
;
3144 orig_code
= gimple_assign_rhs_code (c
->cand_stmt
);
3145 orig_rhs1
= gimple_assign_rhs1 (c
->cand_stmt
);
3146 orig_rhs2
= gimple_assign_rhs2 (c
->cand_stmt
);
3147 cand_incr
= cand_increment (c
);
3149 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3151 fputs ("Replacing: ", dump_file
);
3152 print_gimple_stmt (dump_file
, c
->cand_stmt
, 0, 0);
3153 stmt_to_print
= c
->cand_stmt
;
3156 if (address_arithmetic_p
)
3157 repl_code
= POINTER_PLUS_EXPR
;
3159 repl_code
= PLUS_EXPR
;
3161 /* If the increment has an initializer T_0, replace the candidate
3162 statement with an add of the basis name and the initializer. */
3163 if (incr_vec
[i
].initializer
)
3165 tree init_type
= TREE_TYPE (incr_vec
[i
].initializer
);
3166 tree orig_type
= TREE_TYPE (orig_rhs2
);
3168 if (types_compatible_p (orig_type
, init_type
))
3169 rhs2
= incr_vec
[i
].initializer
;
3171 rhs2
= introduce_cast_before_cand (c
, orig_type
,
3172 incr_vec
[i
].initializer
,
3175 if (incr_vec
[i
].incr
!= cand_incr
)
3177 gcc_assert (repl_code
== PLUS_EXPR
);
3178 repl_code
= MINUS_EXPR
;
3181 stmt_to_print
= replace_rhs_if_not_dup (repl_code
, basis_name
, rhs2
,
3182 orig_code
, orig_rhs1
, orig_rhs2
,
3186 /* Otherwise, the increment is one of -1, 0, and 1. Replace
3187 with a subtract of the stride from the basis name, a copy
3188 from the basis name, or an add of the stride to the basis
3189 name, respectively. It may be necessary to introduce a
3190 cast (or reuse an existing cast). */
3191 else if (cand_incr
.is_one ())
3193 tree stride_type
= TREE_TYPE (c
->stride
);
3194 tree orig_type
= TREE_TYPE (orig_rhs2
);
3196 if (types_compatible_p (orig_type
, stride_type
))
3199 rhs2
= introduce_cast_before_cand (c
, orig_type
, c
->stride
, new_var
);
3201 stmt_to_print
= replace_rhs_if_not_dup (repl_code
, basis_name
, rhs2
,
3202 orig_code
, orig_rhs1
, orig_rhs2
,
3206 else if (cand_incr
.is_minus_one ())
3208 tree stride_type
= TREE_TYPE (c
->stride
);
3209 tree orig_type
= TREE_TYPE (orig_rhs2
);
3210 gcc_assert (repl_code
!= POINTER_PLUS_EXPR
);
3212 if (types_compatible_p (orig_type
, stride_type
))
3215 rhs2
= introduce_cast_before_cand (c
, orig_type
, c
->stride
, new_var
);
3217 if (orig_code
!= MINUS_EXPR
3218 || !operand_equal_p (basis_name
, orig_rhs1
, 0)
3219 || !operand_equal_p (rhs2
, orig_rhs2
, 0))
3221 gimple_stmt_iterator gsi
= gsi_for_stmt (c
->cand_stmt
);
3222 gimple_assign_set_rhs_with_ops (&gsi
, MINUS_EXPR
, basis_name
, rhs2
);
3223 update_stmt (gsi_stmt (gsi
));
3225 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3226 stmt_to_print
= gsi_stmt (gsi
);
3228 else if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3229 fputs (" (duplicate, not actually replacing)\n", dump_file
);
3232 else if (cand_incr
.is_zero ())
3234 tree lhs
= gimple_assign_lhs (c
->cand_stmt
);
3235 tree lhs_type
= TREE_TYPE (lhs
);
3236 tree basis_type
= TREE_TYPE (basis_name
);
3238 if (types_compatible_p (lhs_type
, basis_type
))
3240 gimple copy_stmt
= gimple_build_assign (lhs
, basis_name
);
3241 gimple_stmt_iterator gsi
= gsi_for_stmt (c
->cand_stmt
);
3242 gimple_set_location (copy_stmt
, gimple_location (c
->cand_stmt
));
3243 gsi_replace (&gsi
, copy_stmt
, false);
3245 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3246 stmt_to_print
= copy_stmt
;
3250 gimple_stmt_iterator gsi
= gsi_for_stmt (c
->cand_stmt
);
3251 gimple cast_stmt
= gimple_build_assign_with_ops (NOP_EXPR
, lhs
,
3254 gimple_set_location (cast_stmt
, gimple_location (c
->cand_stmt
));
3255 gsi_replace (&gsi
, cast_stmt
, false);
3257 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3258 stmt_to_print
= cast_stmt
;
3264 if (dump_file
&& (dump_flags
& TDF_DETAILS
) && stmt_to_print
)
3266 fputs ("With: ", dump_file
);
3267 print_gimple_stmt (dump_file
, stmt_to_print
, 0, 0);
3268 fputs ("\n", dump_file
);
3272 /* For each candidate in the tree rooted at C, replace it with
3273 an increment if such has been shown to be profitable. */
3276 replace_profitable_candidates (slsr_cand_t c
)
3278 if (!cand_already_replaced (c
))
3280 double_int increment
= cand_abs_increment (c
);
3281 tree new_var
= NULL
;
3282 enum tree_code orig_code
= gimple_assign_rhs_code (c
->cand_stmt
);
3285 i
= incr_vec_index (increment
);
3287 /* Only process profitable increments. Nothing useful can be done
3288 to a cast or copy. */
3289 if (profitable_increment_p (i
)
3290 && orig_code
!= MODIFY_EXPR
3291 && orig_code
!= NOP_EXPR
)
3293 if (phi_dependent_cand_p (c
))
3295 gimple phi
= lookup_cand (c
->def_phi
)->cand_stmt
;
3297 if (all_phi_incrs_profitable (c
, phi
))
3299 /* Look up the LHS SSA name from C's basis. This will be
3300 the RHS1 of the adds we will introduce to create new
3302 slsr_cand_t basis
= lookup_cand (c
->basis
);
3303 tree basis_name
= gimple_assign_lhs (basis
->cand_stmt
);
3305 /* Create a new phi statement that will represent C's true
3306 basis after the transformation is complete. */
3307 location_t loc
= gimple_location (c
->cand_stmt
);
3308 tree name
= create_phi_basis (c
, phi
, basis_name
,
3309 loc
, UNKNOWN_STRIDE
);
3311 /* Replace C with an add of the new basis phi and the
3313 replace_one_candidate (c
, i
, &new_var
, name
);
3318 slsr_cand_t basis
= lookup_cand (c
->basis
);
3319 tree basis_name
= gimple_assign_lhs (basis
->cand_stmt
);
3320 replace_one_candidate (c
, i
, &new_var
, basis_name
);
3326 replace_profitable_candidates (lookup_cand (c
->sibling
));
3329 replace_profitable_candidates (lookup_cand (c
->dependent
));
3332 /* Analyze costs of related candidates in the candidate vector,
3333 and make beneficial replacements. */
3336 analyze_candidates_and_replace (void)
3341 /* Each candidate that has a null basis and a non-null
3342 dependent is the root of a tree of related statements.
3343 Analyze each tree to determine a subset of those
3344 statements that can be replaced with maximum benefit. */
3345 FOR_EACH_VEC_ELT (cand_vec
, i
, c
)
3347 slsr_cand_t first_dep
;
3349 if (c
->basis
!= 0 || c
->dependent
== 0)
3352 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3353 fprintf (dump_file
, "\nProcessing dependency tree rooted at %d.\n",
3356 first_dep
= lookup_cand (c
->dependent
);
3358 /* If this is a chain of CAND_REFs, unconditionally replace
3359 each of them with a strength-reduced data reference. */
3360 if (c
->kind
== CAND_REF
)
3363 /* If the common stride of all related candidates is a known
3364 constant, each candidate without a phi-dependence can be
3365 profitably replaced. Each replaces a multiply by a single
3366 add, with the possibility that a feeding add also goes dead.
3367 A candidate with a phi-dependence is replaced only if the
3368 compensation code it requires is offset by the strength
3369 reduction savings. */
3370 else if (TREE_CODE (c
->stride
) == INTEGER_CST
)
3371 replace_uncond_cands_and_profitable_phis (first_dep
);
3373 /* When the stride is an SSA name, it may still be profitable
3374 to replace some or all of the dependent candidates, depending
3375 on whether the introduced increments can be reused, or are
3376 less expensive to calculate than the replaced statements. */
3380 enum machine_mode mode
;
3383 /* Determine whether we'll be generating pointer arithmetic
3384 when replacing candidates. */
3385 address_arithmetic_p
= (c
->kind
== CAND_ADD
3386 && POINTER_TYPE_P (c
->cand_type
));
3388 /* If all candidates have already been replaced under other
3389 interpretations, nothing remains to be done. */
3390 length
= count_candidates (c
);
3394 /* Construct an array of increments for this candidate chain. */
3395 incr_vec
= XNEWVEC (incr_info
, length
);
3397 record_increments (c
);
3399 /* Determine which increments are profitable to replace. */
3400 mode
= TYPE_MODE (TREE_TYPE (gimple_assign_lhs (c
->cand_stmt
)));
3401 speed
= optimize_cands_for_speed_p (c
);
3402 analyze_increments (first_dep
, mode
, speed
);
3404 /* Insert initializers of the form T_0 = stride * increment
3405 for use in profitable replacements. */
3406 insert_initializers (first_dep
);
3409 /* Perform the replacements. */
3410 replace_profitable_candidates (first_dep
);
3417 execute_strength_reduction (void)
3419 struct dom_walk_data walk_data
;
3421 /* Create the obstack where candidates will reside. */
3422 gcc_obstack_init (&cand_obstack
);
3424 /* Allocate the candidate vector. */
3425 cand_vec
.create (128);
3427 /* Allocate the mapping from statements to candidate indices. */
3428 stmt_cand_map
= pointer_map_create ();
3430 /* Create the obstack where candidate chains will reside. */
3431 gcc_obstack_init (&chain_obstack
);
3433 /* Allocate the mapping from base expressions to candidate chains. */
3434 base_cand_map
.create (500);
3436 /* Initialize the loop optimizer. We need to detect flow across
3437 back edges, and this gives us dominator information as well. */
3438 loop_optimizer_init (AVOID_CFG_MODIFICATIONS
);
3440 /* Set up callbacks for the generic dominator tree walker. */
3441 walk_data
.dom_direction
= CDI_DOMINATORS
;
3442 walk_data
.initialize_block_local_data
= NULL
;
3443 walk_data
.before_dom_children
= find_candidates_in_block
;
3444 walk_data
.after_dom_children
= NULL
;
3445 walk_data
.global_data
= NULL
;
3446 walk_data
.block_local_data_size
= 0;
3447 init_walk_dominator_tree (&walk_data
);
3449 /* Walk the CFG in predominator order looking for strength reduction
3451 walk_dominator_tree (&walk_data
, ENTRY_BLOCK_PTR
);
3453 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3456 dump_cand_chains ();
3459 /* Analyze costs and make appropriate replacements. */
3460 analyze_candidates_and_replace ();
3462 /* Free resources. */
3463 fini_walk_dominator_tree (&walk_data
);
3464 loop_optimizer_finalize ();
3465 base_cand_map
.dispose ();
3466 obstack_free (&chain_obstack
, NULL
);
3467 pointer_map_destroy (stmt_cand_map
);
3468 cand_vec
.release ();
3469 obstack_free (&cand_obstack
, NULL
);
3475 gate_strength_reduction (void)
3477 return flag_tree_slsr
;
3480 struct gimple_opt_pass pass_strength_reduction
=
3485 OPTGROUP_NONE
, /* optinfo_flags */
3486 gate_strength_reduction
, /* gate */
3487 execute_strength_reduction
, /* execute */
3490 0, /* static_pass_number */
3491 TV_GIMPLE_SLSR
, /* tv_id */
3492 PROP_cfg
| PROP_ssa
, /* properties_required */
3493 0, /* properties_provided */
3494 0, /* properties_destroyed */
3495 0, /* todo_flags_start */
3496 TODO_verify_ssa
/* todo_flags_finish */