[official-gcc.git] / gcc / gimple-ssa-strength-reduction.c
1 /* Straight-line strength reduction.
2 Copyright (C) 2012-2018 Free Software Foundation, Inc.
3 Contributed by Bill Schmidt, IBM <wschmidt@linux.ibm.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 /* There are many algorithms for performing strength reduction on
22 loops. This is not one of them. IVOPTS handles strength reduction
23 of induction variables just fine. This pass is intended to pick
24 up the crumbs it leaves behind, by considering opportunities for
25 strength reduction along dominator paths.
27 Strength reduction addresses explicit multiplies, and certain
28 multiplies implicit in addressing expressions. It would also be
29 possible to apply strength reduction to divisions and modulos,
30 but such opportunities are relatively uncommon.
32 Strength reduction is also currently restricted to integer operations.
33 If desired, it could be extended to floating-point operations under
34 control of something like -funsafe-math-optimizations. */
36 #include "config.h"
37 #include "system.h"
38 #include "coretypes.h"
39 #include "backend.h"
40 #include "rtl.h"
41 #include "tree.h"
42 #include "gimple.h"
43 #include "cfghooks.h"
44 #include "tree-pass.h"
45 #include "ssa.h"
46 #include "expmed.h"
47 #include "gimple-pretty-print.h"
48 #include "fold-const.h"
49 #include "gimple-iterator.h"
50 #include "gimplify-me.h"
51 #include "stor-layout.h"
52 #include "cfgloop.h"
53 #include "tree-cfg.h"
54 #include "domwalk.h"
55 #include "params.h"
56 #include "tree-ssa-address.h"
57 #include "tree-affine.h"
58 #include "tree-eh.h"
59 #include "builtins.h"
61 /* Information about a strength reduction candidate. Each statement
62 in the candidate table represents an expression of one of the
63 following forms (the special case of CAND_REF will be described
64 later):
66 (CAND_MULT) S1: X = (B + i) * S
67 (CAND_ADD) S1: X = B + (i * S)
69 Here X and B are SSA names, i is an integer constant, and S is
70 either an SSA name or a constant. We call B the "base," i the
71 "index", and S the "stride."
73 Any statement S0 that dominates S1 and is of the form:
75 (CAND_MULT) S0: Y = (B + i') * S
76 (CAND_ADD) S0: Y = B + (i' * S)
78 is called a "basis" for S1. In both cases, S1 may be replaced by
80 S1': X = Y + (i - i') * S,
82 where (i - i') * S is folded to the extent possible.
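
   As a concrete illustration (hypothetical names), given the
   straight-line fragment

     x = (b + 3) * s;                S0: CAND_MULT (B: b, i: 3, S: s)
     y = (b + 7) * s;                S1: CAND_MULT (B: b, i: 7, S: s)

   S0 is a basis for S1, and S1 may be rewritten as

     y = x + 4 * s;                  S1': (i - i') * S = (7 - 3) * s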
84 All gimple statements are visited in dominator order, and each
85 statement that may contribute to one of the forms of S1 above is
86 given at least one entry in the candidate table. Such statements
87 include addition, pointer addition, subtraction, multiplication,
88 negation, copies, and nontrivial type casts. If a statement may
89 represent more than one expression of the forms of S1 above,
90 multiple "interpretations" are stored in the table and chained
91 together. Examples (concrete instances are sketched after the list):
93 * An add of two SSA names may treat either operand as the base.
94 * A multiply of two SSA names, likewise.
95 * A copy or cast may be thought of as either a CAND_MULT with
96 i = 0 and S = 1, or as a CAND_ADD with i = 0 or S = 0.
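
   Concretely (hypothetical names, and assuming nothing useful is known
   about the operands): x = a + b is entered both as ADD (B: a, i: 1, S: b)
   and as ADD (B: b, i: 1, S: a); x = a * b is entered as
   MULT (B: a, i: 0, S: b) and MULT (B: b, i: 0, S: a); a copy x = y is
   entered as ADD (B: y, i: 0, S: 1) and MULT (B: y, i: 0, S: 1).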
98 Candidate records are allocated from an obstack. They are addressed
99 both from a hash table keyed on S1, and from a vector of candidate
100 pointers arranged in predominator order.
102 Opportunity note
103 ----------------
104 Currently we don't recognize:
106 S0: Y = (S * i') - B
107 S1: X = (S * i) - B
109 as a strength reduction opportunity, even though this S1 would
110 also be replaceable by the S1' above. This can be added if it
111 comes up in practice.
113 Strength reduction in addressing
114 --------------------------------
115 There is another kind of candidate known as CAND_REF. A CAND_REF
116 describes a statement containing a memory reference having
117 complex addressing that might benefit from strength reduction.
118 Specifically, we are interested in references for which
119 get_inner_reference returns a base address, offset, and bitpos as
120 follows:
122 base: MEM_REF (T1, C1)
123 offset: MULT_EXPR (PLUS_EXPR (T2, C2), C3)
124 bitpos: C4 * BITS_PER_UNIT
126 Here T1 and T2 are arbitrary trees, and C1, C2, C3, C4 are
127 arbitrary integer constants. Note that C2 may be zero, in which
128 case the offset will be MULT_EXPR (T2, C3).
130 When this pattern is recognized, the original memory reference
131 can be replaced with:
133 MEM_REF (POINTER_PLUS_EXPR (T1, MULT_EXPR (T2, C3)),
134 C1 + (C2 * C3) + C4)
136 which distributes the multiply to allow constant folding. When
137 two or more addressing expressions can be represented by MEM_REFs
138 of this form, differing only in the constants C1, C2, and C4,
139 making this substitution produces more efficient addressing during
140 the RTL phases. When there are not at least two expressions with
141 the same values of T1, T2, and C3, there is nothing to be gained
142 by the replacement.
144 Strength reduction of CAND_REFs uses the same infrastructure as
145 that used by CAND_MULTs and CAND_ADDs. We record T1 in the base (B)
146 field, MULT_EXPR (T2, C3) in the stride (S) field, and
147 C1 + (C2 * C3) + C4 in the index (i) field. A basis for a CAND_REF
148 is thus another CAND_REF with the same B and S values. When at
149 least two CAND_REFs are chained together using the basis relation,
150 each of them is replaced as above, resulting in improved code
151 generation for addressing.
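
   A worked example (hypothetical trees and constants): suppose two
   references yield

     base: MEM_REF (p_1, 4),  offset: (i_2 + 1) * 16,  bitpos: 0
     base: MEM_REF (p_1, 8),  offset: (i_2 + 2) * 16,  bitpos: 32

   Both share T1 = p_1, T2 = i_2, and C3 = 16, so they are replaced by

     MEM_REF (POINTER_PLUS_EXPR (p_1, i_2 * 16), 4 + 1*16 + 0)
     MEM_REF (POINTER_PLUS_EXPR (p_1, i_2 * 16), 8 + 2*16 + 4)

   i.e. constant indices 20 and 44, letting later phases share the
   address computation p_1 + i_2 * 16.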
153 Conditional candidates
154 ======================
156 Conditional candidates are best illustrated with an example.
157 Consider the code sequence:
159 (1) x_0 = ...;
160 (2) a_0 = x_0 * 5; MULT (B: x_0; i: 0; S: 5)
161 if (...)
162 (3) x_1 = x_0 + 1; ADD (B: x_0, i: 1; S: 1)
163 (4) x_2 = PHI <x_0, x_1>; PHI (B: x_0, i: 0, S: 1)
164 (5) x_3 = x_2 + 1; ADD (B: x_2, i: 1, S: 1)
165 (6) a_1 = x_3 * 5; MULT (B: x_2, i: 1; S: 5)
167 Here strength reduction is complicated by the uncertain value of x_2.
168 A legitimate transformation is:
170 (1) x_0 = ...;
171 (2) a_0 = x_0 * 5;
172 if (...)
174 (3) [x_1 = x_0 + 1;]
175 (3a) t_1 = a_0 + 5;
177 (4) [x_2 = PHI <x_0, x_1>;]
178 (4a) t_2 = PHI <a_0, t_1>;
179 (5) [x_3 = x_2 + 1;]
180 (6r) a_1 = t_2 + 5;
182 where the bracketed instructions may go dead.
184 To recognize this opportunity, we have to observe that statement (6)
185 has a "hidden basis" (2). The hidden basis is unlike a normal basis
186 in that the statement and the hidden basis have different base SSA
187 names (x_2 and x_0, respectively). The relationship is established
188 when a statement's base name (x_2) is defined by a phi statement (4),
189 each argument of which (x_0, x_1) has an identical "derived base name."
190 If the argument is defined by a candidate (as x_1 is by (3)) that is a
191 CAND_ADD having a stride of 1, the derived base name of the argument is
192 the base name of the candidate (x_0). Otherwise, the argument itself
193 is its derived base name (as is the case with argument x_0).
195 The hidden basis for statement (6) is the nearest dominating candidate
196 whose base name is the derived base name (x_0) of the feeding phi (4),
197 and whose stride is identical to that of the statement. We can then
198 create the new "phi basis" (4a) and feeding adds along incoming arcs (3a),
199 allowing the final replacement of (6) by the strength-reduced (6r).
201 To facilitate this, a new kind of candidate (CAND_PHI) is introduced.
202 A CAND_PHI is not a candidate for replacement, but is maintained in the
203 candidate table to ease discovery of hidden bases. Any phi statement
204 whose arguments share a common derived base name is entered into the
205 table with the derived base name, an (arbitrary) index of zero, and a
206 stride of 1. A statement with a hidden basis can then be detected by
207 simply looking up its feeding phi definition in the candidate table,
208 extracting the derived base name, and searching for a basis in the
209 usual manner after substituting the derived base name.
211 Note that the transformation is only valid when the original phi and
212 the statements that define the phi's arguments are all at the same
213 position in the loop hierarchy. */
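
/* As a source-level sketch of the conditional case above (hypothetical
   C, simplified), the function

     int
     f (int x, int p)
     {
       int a0 = x * 5;
       if (p)
         x = x + 1;
       x = x + 1;
       return a0 + x * 5;
     }

   compiles, in SSA form, to essentially the sequence (1)-(6) shown in
   the commentary, and the second multiply can be strength-reduced
   against a0 through the phi for x.  */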
216 /* Index into the candidate vector, offset by 1. VECs are zero-based,
217 while cand_idx's are one-based, with zero indicating null. */
218 typedef unsigned cand_idx;
220 /* The kind of candidate. */
221 enum cand_kind
223 CAND_MULT,
224 CAND_ADD,
225 CAND_REF,
226 CAND_PHI
229 struct slsr_cand_d
231 /* The candidate statement S1. */
232 gimple *cand_stmt;
234 /* The base expression B: often an SSA name, but not always. */
235 tree base_expr;
237 /* The stride S. */
238 tree stride;
240 /* The index constant i. */
241 widest_int index;
243 /* The type of the candidate. This is normally the type of base_expr,
244 but casts may have occurred when combining feeding instructions.
245 A candidate can only be a basis for candidates of the same final type.
246 (For CAND_REFs, this is the type to be used for operand 1 of the
247 replacement MEM_REF.) */
248 tree cand_type;
250 /* The type to be used to interpret the stride field when the stride
251 is not a constant. Normally the same as the type of the recorded
252 stride, but when the stride has been cast we need to maintain that
253 knowledge in order to make legal substitutions without losing
254 precision. When the stride is a constant, this will be sizetype. */
255 tree stride_type;
257 /* The kind of candidate (CAND_MULT, etc.). */
258 enum cand_kind kind;
260 /* Index of this candidate in the candidate vector. */
261 cand_idx cand_num;
263 /* Index of the next candidate record for the same statement.
264 A statement may be useful in more than one way (e.g., due to
265 commutativity). So we can have multiple "interpretations"
266 of a statement. */
267 cand_idx next_interp;
269 /* Index of the basis statement S0, if any, in the candidate vector. */
270 cand_idx basis;
272 /* First candidate for which this candidate is a basis, if one exists. */
273 cand_idx dependent;
275 /* Next candidate having the same basis as this one. */
276 cand_idx sibling;
278 /* If this is a conditional candidate, the CAND_PHI candidate
279 that defines the base SSA name B. */
280 cand_idx def_phi;
282 /* Savings that can be expected from eliminating dead code if this
283 candidate is replaced. */
284 int dead_savings;
286 /* For PHI candidates, use a visited flag to keep from processing the
287 same PHI twice from multiple paths. */
288 int visited;
290 /* We sometimes have to cache a phi basis with a phi candidate to
291 avoid processing it twice. Valid only if visited==1. */
292 tree cached_basis;
295 typedef struct slsr_cand_d slsr_cand, *slsr_cand_t;
296 typedef const struct slsr_cand_d *const_slsr_cand_t;
298 /* Pointers to candidates are chained together as part of a mapping
299 from base expressions to the candidates that use them. */
301 struct cand_chain_d
303 /* Base expression for the chain of candidates: often, but not
304 always, an SSA name. */
305 tree base_expr;
307 /* Pointer to a candidate. */
308 slsr_cand_t cand;
310 /* Chain pointer. */
311 struct cand_chain_d *next;
315 typedef struct cand_chain_d cand_chain, *cand_chain_t;
316 typedef const struct cand_chain_d *const_cand_chain_t;
318 /* Information about a unique "increment" associated with candidates
319 having an SSA name for a stride. An increment is the difference
320 between the index of the candidate and the index of its basis,
321 i.e., (i - i') as discussed in the module commentary.
323 When we are not going to generate address arithmetic we treat
324 increments that differ only in sign as the same, allowing sharing
325 of the cost of initializers. The absolute value of the increment
326 is stored in the incr_info. */
328 struct incr_info_d
330 /* The increment that relates a candidate to its basis. */
331 widest_int incr;
333 /* How many times the increment occurs in the candidate tree. */
334 unsigned count;
336 /* Cost of replacing candidates using this increment. Negative and
337 zero costs indicate replacement should be performed. */
338 int cost;
340 /* If this increment is profitable but is not -1, 0, or 1, it requires
341 an initializer T_0 = stride * incr to be found or introduced in the
342 nearest common dominator of all candidates. This field holds T_0
343 for subsequent use. */
344 tree initializer;
346 /* If the initializer was found to already exist, this is the block
347 where it was found. */
348 basic_block init_bb;
351 typedef struct incr_info_d incr_info, *incr_info_t;
353 /* Candidates are maintained in a vector. If candidate X dominates
354 candidate Y, then X appears before Y in the vector; but the
355 converse does not necessarily hold. */
356 static vec<slsr_cand_t> cand_vec;
358 enum cost_consts
360 COST_NEUTRAL = 0,
361 COST_INFINITE = 1000
364 enum stride_status
366 UNKNOWN_STRIDE = 0,
367 KNOWN_STRIDE = 1
370 enum phi_adjust_status
372 NOT_PHI_ADJUST = 0,
373 PHI_ADJUST = 1
376 enum count_phis_status
378 DONT_COUNT_PHIS = 0,
379 COUNT_PHIS = 1
382 /* Constrain how many PHI nodes we will visit for a conditional
383 candidate (depth and breadth). */
384 const int MAX_SPREAD = 16;
386 /* Pointer map embodying a mapping from statements to candidates. */
387 static hash_map<gimple *, slsr_cand_t> *stmt_cand_map;
389 /* Obstack for candidates. */
390 static struct obstack cand_obstack;
392 /* Obstack for candidate chains. */
393 static struct obstack chain_obstack;
395 /* An array INCR_VEC of incr_infos is used during analysis of related
396 candidates having an SSA name for a stride. INCR_VEC_LEN describes
397 its current length. MAX_INCR_VEC_LEN is used to avoid costly
398 pathological cases. */
399 static incr_info_t incr_vec;
400 static unsigned incr_vec_len;
401 const int MAX_INCR_VEC_LEN = 16;
403 /* For a chain of candidates with unknown stride, indicates whether or not
404 we must generate pointer arithmetic when replacing statements. */
405 static bool address_arithmetic_p;
407 /* Forward function declarations. */
408 static slsr_cand_t base_cand_from_table (tree);
409 static tree introduce_cast_before_cand (slsr_cand_t, tree, tree);
410 static bool legal_cast_p_1 (tree, tree);
412 /* Produce a pointer to the IDX'th candidate in the candidate vector. */
414 static slsr_cand_t
415 lookup_cand (cand_idx idx)
417 return cand_vec[idx - 1];
420 /* Helper for hashing a candidate chain header. */
422 struct cand_chain_hasher : nofree_ptr_hash <cand_chain>
424 static inline hashval_t hash (const cand_chain *);
425 static inline bool equal (const cand_chain *, const cand_chain *);
428 inline hashval_t
429 cand_chain_hasher::hash (const cand_chain *p)
431 tree base_expr = p->base_expr;
432 return iterative_hash_expr (base_expr, 0);
435 inline bool
436 cand_chain_hasher::equal (const cand_chain *chain1, const cand_chain *chain2)
438 return operand_equal_p (chain1->base_expr, chain2->base_expr, 0);
441 /* Hash table embodying a mapping from base exprs to chains of candidates. */
442 static hash_table<cand_chain_hasher> *base_cand_map;
444 /* Pointer map used by tree_to_aff_combination_expand. */
445 static hash_map<tree, name_expansion *> *name_expansions;
446 /* Pointer map embodying a mapping from bases to alternative bases. */
447 static hash_map<tree, tree> *alt_base_map;
449 /* Given BASE, use the tree affine combination facilities to
450 find the underlying tree expression for BASE, with any
451 immediate offset excluded.
453 N.B. we should eliminate this backtracking with better forward
454 analysis in a future release. */
456 static tree
457 get_alternative_base (tree base)
459 tree *result = alt_base_map->get (base);
461 if (result == NULL)
463 tree expr;
464 aff_tree aff;
466 tree_to_aff_combination_expand (base, TREE_TYPE (base),
467 &aff, &name_expansions);
468 aff.offset = 0;
469 expr = aff_combination_to_tree (&aff);
471 gcc_assert (!alt_base_map->put (base, base == expr ? NULL : expr));
473 return expr == base ? NULL : expr;
476 return *result;
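
/* For illustration (hypothetical SSA names): if BASE is b_2 and b_2 is
   defined as b_2 = a_1 + 80, its affine expansion is a_1 + 80; dropping
   the immediate offset leaves essentially a_1, which is returned as the
   alternative base.  Two CAND_REFs whose bases differ only by such an
   offset can then be matched through the same base expression.  */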
479 /* Look in the candidate table for a CAND_PHI that defines BASE and
480 return it if found; otherwise return NULL. */
482 static cand_idx
483 find_phi_def (tree base)
485 slsr_cand_t c;
487 if (TREE_CODE (base) != SSA_NAME)
488 return 0;
490 c = base_cand_from_table (base);
492 if (!c || c->kind != CAND_PHI
493 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_phi_result (c->cand_stmt)))
494 return 0;
496 return c->cand_num;
499 /* Determine whether all uses of NAME are directly or indirectly
500 used by STMT. That is, we want to know whether, if STMT goes
501 dead, the definition of NAME also goes dead. */
502 static bool
503 uses_consumed_by_stmt (tree name, gimple *stmt, unsigned recurse = 0)
505 gimple *use_stmt;
506 imm_use_iterator iter;
507 bool retval = true;
509 FOR_EACH_IMM_USE_STMT (use_stmt, iter, name)
511 if (use_stmt == stmt || is_gimple_debug (use_stmt))
512 continue;
514 if (!is_gimple_assign (use_stmt)
515 || !gimple_get_lhs (use_stmt)
516 || !is_gimple_reg (gimple_get_lhs (use_stmt))
517 || recurse >= 10
518 || !uses_consumed_by_stmt (gimple_get_lhs (use_stmt), stmt,
519 recurse + 1))
521 retval = false;
522 BREAK_FROM_IMM_USE_STMT (iter);
526 return retval;
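
/* For example (hypothetical names), given

     t_1 = x_1 * 4;
     t_2 = t_1 + 1;
     y_1 = t_2 + z_1;    <-- STMT

   uses_consumed_by_stmt (t_1, STMT) is true: the only use of t_1 feeds
   t_2, whose only use feeds STMT.  If t_1 were additionally passed to a
   call, the result would be false.  */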
529 /* Helper routine for find_basis_for_candidate. May be called twice:
530 once for the candidate's base expr, and optionally again either for
531 the candidate's phi definition or for a CAND_REF's alternative base
532 expression. */
534 static slsr_cand_t
535 find_basis_for_base_expr (slsr_cand_t c, tree base_expr)
537 cand_chain mapping_key;
538 cand_chain_t chain;
539 slsr_cand_t basis = NULL;
541 // Limit potential of N^2 behavior for long candidate chains.
542 int iters = 0;
543 int max_iters = PARAM_VALUE (PARAM_MAX_SLSR_CANDIDATE_SCAN);
545 mapping_key.base_expr = base_expr;
546 chain = base_cand_map->find (&mapping_key);
548 for (; chain && iters < max_iters; chain = chain->next, ++iters)
550 slsr_cand_t one_basis = chain->cand;
552 if (one_basis->kind != c->kind
553 || one_basis->cand_stmt == c->cand_stmt
554 || !operand_equal_p (one_basis->stride, c->stride, 0)
555 || !types_compatible_p (one_basis->cand_type, c->cand_type)
556 || !types_compatible_p (one_basis->stride_type, c->stride_type)
557 || !dominated_by_p (CDI_DOMINATORS,
558 gimple_bb (c->cand_stmt),
559 gimple_bb (one_basis->cand_stmt)))
560 continue;
562 tree lhs = gimple_assign_lhs (one_basis->cand_stmt);
563 if (lhs && TREE_CODE (lhs) == SSA_NAME
564 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
565 continue;
567 if (!basis || basis->cand_num < one_basis->cand_num)
568 basis = one_basis;
571 return basis;
574 /* Use the base expr from candidate C to look for possible candidates
575 that can serve as a basis for C. Each potential basis must also
576 appear in a block that dominates the candidate statement and have
577 the same stride and type. If more than one possible basis exists,
578 the one with highest index in the vector is chosen; this will be
579 the most immediately dominating basis. */
581 static int
582 find_basis_for_candidate (slsr_cand_t c)
584 slsr_cand_t basis = find_basis_for_base_expr (c, c->base_expr);
586 /* If a candidate doesn't have a basis using its base expression,
587 it may have a basis hidden by one or more intervening phis. */
588 if (!basis && c->def_phi)
590 basic_block basis_bb, phi_bb;
591 slsr_cand_t phi_cand = lookup_cand (c->def_phi);
592 basis = find_basis_for_base_expr (c, phi_cand->base_expr);
594 if (basis)
596 /* A hidden basis must dominate the phi-definition of the
597 candidate's base name. */
598 phi_bb = gimple_bb (phi_cand->cand_stmt);
599 basis_bb = gimple_bb (basis->cand_stmt);
601 if (phi_bb == basis_bb
602 || !dominated_by_p (CDI_DOMINATORS, phi_bb, basis_bb))
604 basis = NULL;
605 c->basis = 0;
608 /* If we found a hidden basis, estimate additional dead-code
609 savings if the phi and its feeding statements can be removed. */
610 tree feeding_var = gimple_phi_result (phi_cand->cand_stmt);
611 if (basis && uses_consumed_by_stmt (feeding_var, c->cand_stmt))
612 c->dead_savings += phi_cand->dead_savings;
616 if (flag_expensive_optimizations && !basis && c->kind == CAND_REF)
618 tree alt_base_expr = get_alternative_base (c->base_expr);
619 if (alt_base_expr)
620 basis = find_basis_for_base_expr (c, alt_base_expr);
623 if (basis)
625 c->sibling = basis->dependent;
626 basis->dependent = c->cand_num;
627 return basis->cand_num;
630 return 0;
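
/* For example (hypothetical candidate numbers): if candidates 5 and
   then 9 both select candidate 3 as their basis, we end up with
   3->dependent == 9, 9->sibling == 5 and 5->sibling == 0; that is, the
   dependents of a basis form a singly linked list threaded through the
   sibling field, most recent first.  */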
633 /* Record a mapping from BASE to C, indicating that C may potentially serve
634 as a basis using that base expression. BASE may be the same as
635 C->BASE_EXPR; alternatively BASE can be a different tree that shares the
636 underlying expression of C->BASE_EXPR. */
638 static void
639 record_potential_basis (slsr_cand_t c, tree base)
641 cand_chain_t node;
642 cand_chain **slot;
644 gcc_assert (base);
646 node = (cand_chain_t) obstack_alloc (&chain_obstack, sizeof (cand_chain));
647 node->base_expr = base;
648 node->cand = c;
649 node->next = NULL;
650 slot = base_cand_map->find_slot (node, INSERT);
652 if (*slot)
654 cand_chain_t head = (cand_chain_t) (*slot);
655 node->next = head->next;
656 head->next = node;
658 else
659 *slot = node;
662 /* Allocate storage for a new candidate and initialize its fields.
663 Attempt to find a basis for the candidate.
665 For CAND_REF, an alternative base may also be recorded and used
666 to find a basis. This helps cases where the expression hidden
667 behind BASE (which is usually an SSA_NAME) has an immediate offset,
668 e.g.
670 a2[i][j] = 1;
671 a2[i + 20][j] = 2; */
673 static slsr_cand_t
674 alloc_cand_and_find_basis (enum cand_kind kind, gimple *gs, tree base,
675 const widest_int &index, tree stride, tree ctype,
676 tree stype, unsigned savings)
678 slsr_cand_t c = (slsr_cand_t) obstack_alloc (&cand_obstack,
679 sizeof (slsr_cand));
680 c->cand_stmt = gs;
681 c->base_expr = base;
682 c->stride = stride;
683 c->index = index;
684 c->cand_type = ctype;
685 c->stride_type = stype;
686 c->kind = kind;
687 c->cand_num = cand_vec.length () + 1;
688 c->next_interp = 0;
689 c->dependent = 0;
690 c->sibling = 0;
691 c->def_phi = kind == CAND_MULT ? find_phi_def (base) : 0;
692 c->dead_savings = savings;
693 c->visited = 0;
694 c->cached_basis = NULL_TREE;
696 cand_vec.safe_push (c);
698 if (kind == CAND_PHI)
699 c->basis = 0;
700 else
701 c->basis = find_basis_for_candidate (c);
703 record_potential_basis (c, base);
704 if (flag_expensive_optimizations && kind == CAND_REF)
706 tree alt_base = get_alternative_base (base);
707 if (alt_base)
708 record_potential_basis (c, alt_base);
711 return c;
714 /* Determine the target cost of statement GS when compiling according
715 to SPEED. */
717 static int
718 stmt_cost (gimple *gs, bool speed)
720 tree lhs, rhs1, rhs2;
721 machine_mode lhs_mode;
723 gcc_assert (is_gimple_assign (gs));
724 lhs = gimple_assign_lhs (gs);
725 rhs1 = gimple_assign_rhs1 (gs);
726 lhs_mode = TYPE_MODE (TREE_TYPE (lhs));
728 switch (gimple_assign_rhs_code (gs))
730 case MULT_EXPR:
731 rhs2 = gimple_assign_rhs2 (gs);
733 if (tree_fits_shwi_p (rhs2))
734 return mult_by_coeff_cost (tree_to_shwi (rhs2), lhs_mode, speed);
736 gcc_assert (TREE_CODE (rhs1) != INTEGER_CST);
737 return mul_cost (speed, lhs_mode);
739 case PLUS_EXPR:
740 case POINTER_PLUS_EXPR:
741 case MINUS_EXPR:
742 return add_cost (speed, lhs_mode);
744 case NEGATE_EXPR:
745 return neg_cost (speed, lhs_mode);
747 CASE_CONVERT:
748 return convert_cost (lhs_mode, TYPE_MODE (TREE_TYPE (rhs1)), speed);
750 /* Note that we don't assign costs to copies that in most cases
751 will go away. */
752 case SSA_NAME:
753 return 0;
755 default:
759 gcc_unreachable ();
760 return 0;
763 /* Look up the defining statement for BASE_IN and return a pointer
764 to its candidate in the candidate table, if any; otherwise NULL.
765 Only CAND_ADD and CAND_MULT candidates are returned. */
767 static slsr_cand_t
768 base_cand_from_table (tree base_in)
770 slsr_cand_t *result;
772 gimple *def = SSA_NAME_DEF_STMT (base_in);
773 if (!def)
774 return (slsr_cand_t) NULL;
776 result = stmt_cand_map->get (def);
778 if (result && (*result)->kind != CAND_REF)
779 return *result;
781 return (slsr_cand_t) NULL;
784 /* Add an entry to the statement-to-candidate mapping. */
786 static void
787 add_cand_for_stmt (gimple *gs, slsr_cand_t c)
789 gcc_assert (!stmt_cand_map->put (gs, c));
792 /* Given PHI which contains a phi statement, determine whether it
793 satisfies all the requirements of a phi candidate. If so, create
794 a candidate. Note that a CAND_PHI never has a basis itself, but
795 is used to help find a basis for subsequent candidates. */
797 static void
798 slsr_process_phi (gphi *phi, bool speed)
800 unsigned i;
801 tree arg0_base = NULL_TREE, base_type;
802 slsr_cand_t c;
803 struct loop *cand_loop = gimple_bb (phi)->loop_father;
804 unsigned savings = 0;
806 /* A CAND_PHI requires each of its arguments to have the same
807 derived base name. (See the module header commentary for a
808 definition of derived base names.) Furthermore, all feeding
809 definitions must be in the same position in the loop hierarchy
810 as PHI. */
812 for (i = 0; i < gimple_phi_num_args (phi); i++)
814 slsr_cand_t arg_cand;
815 tree arg = gimple_phi_arg_def (phi, i);
816 tree derived_base_name = NULL_TREE;
817 gimple *arg_stmt = NULL;
818 basic_block arg_bb = NULL;
820 if (TREE_CODE (arg) != SSA_NAME)
821 return;
823 arg_cand = base_cand_from_table (arg);
825 if (arg_cand)
827 while (arg_cand->kind != CAND_ADD && arg_cand->kind != CAND_PHI)
829 if (!arg_cand->next_interp)
830 return;
832 arg_cand = lookup_cand (arg_cand->next_interp);
835 if (!integer_onep (arg_cand->stride))
836 return;
838 derived_base_name = arg_cand->base_expr;
839 arg_stmt = arg_cand->cand_stmt;
840 arg_bb = gimple_bb (arg_stmt);
842 /* Gather potential dead code savings if the phi statement
843 can be removed later on. */
844 if (uses_consumed_by_stmt (arg, phi))
846 if (gimple_code (arg_stmt) == GIMPLE_PHI)
847 savings += arg_cand->dead_savings;
848 else
849 savings += stmt_cost (arg_stmt, speed);
852 else if (SSA_NAME_IS_DEFAULT_DEF (arg))
854 derived_base_name = arg;
855 arg_bb = single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun));
858 if (!arg_bb || arg_bb->loop_father != cand_loop)
859 return;
861 if (i == 0)
862 arg0_base = derived_base_name;
863 else if (!operand_equal_p (derived_base_name, arg0_base, 0))
864 return;
867 /* Create the candidate. "alloc_cand_and_find_basis" is named
868 misleadingly for this case, as no basis will be sought for a
869 CAND_PHI. */
870 base_type = TREE_TYPE (arg0_base);
872 c = alloc_cand_and_find_basis (CAND_PHI, phi, arg0_base,
873 0, integer_one_node, base_type,
874 sizetype, savings);
876 /* Add the candidate to the statement-candidate mapping. */
877 add_cand_for_stmt (phi, c);
880 /* Given PBASE which is a pointer to tree, look up the defining
881 statement for it and check whether the candidate is in the
882 form of:
884 X = B + (1 * S), S is integer constant
885 X = B + (i * S), S is integer one
887 If so, set PBASE to the candidate's base_expr and return the
888 widest_int (i * S).
889 Otherwise, just return a zero widest_int. */
891 static widest_int
892 backtrace_base_for_ref (tree *pbase)
894 tree base_in = *pbase;
895 slsr_cand_t base_cand;
897 STRIP_NOPS (base_in);
899 /* Strip off widening conversion(s) to handle cases where
900 e.g. 'B' is widened from an 'int' in order to calculate
901 a 64-bit address. */
902 if (CONVERT_EXPR_P (base_in)
903 && legal_cast_p_1 (TREE_TYPE (base_in),
904 TREE_TYPE (TREE_OPERAND (base_in, 0))))
905 base_in = get_unwidened (base_in, NULL_TREE);
907 if (TREE_CODE (base_in) != SSA_NAME)
908 return 0;
910 base_cand = base_cand_from_table (base_in);
912 while (base_cand && base_cand->kind != CAND_PHI)
914 if (base_cand->kind == CAND_ADD
915 && base_cand->index == 1
916 && TREE_CODE (base_cand->stride) == INTEGER_CST)
918 /* X = B + (1 * S), S is integer constant. */
919 *pbase = base_cand->base_expr;
920 return wi::to_widest (base_cand->stride);
922 else if (base_cand->kind == CAND_ADD
923 && TREE_CODE (base_cand->stride) == INTEGER_CST
924 && integer_onep (base_cand->stride))
926 /* X = B + (i * S), S is integer one. */
927 *pbase = base_cand->base_expr;
928 return base_cand->index;
931 if (base_cand->next_interp)
932 base_cand = lookup_cand (base_cand->next_interp);
933 else
934 base_cand = NULL;
937 return 0;
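
/* For example (hypothetical names): if the T2 passed in from
   restructure_reference is t_3 and t_3 is recorded as the CAND_ADD
   t_3 = b_1 + (1 * 12), then *PBASE becomes b_1 and 12 is returned,
   allowing the caller to fold 12 * C3 into the constant index (the
   C5 * C3 term in the comment that follows).  */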
940 /* Look for the following pattern:
942 *PBASE: MEM_REF (T1, C1)
944 *POFFSET: MULT_EXPR (T2, C3) [C2 is zero]
946 MULT_EXPR (PLUS_EXPR (T2, C2), C3)
948 MULT_EXPR (MINUS_EXPR (T2, -C2), C3)
950 *PINDEX: C4 * BITS_PER_UNIT
952 If not present, leave the input values unchanged and return FALSE.
953 Otherwise, modify the input values as follows and return TRUE:
955 *PBASE: T1
956 *POFFSET: MULT_EXPR (T2, C3)
957 *PINDEX: C1 + (C2 * C3) + C4
959 When T2 is recorded by a CAND_ADD in the form of (T2' + C5), it
960 will be further restructured to:
962 *PBASE: T1
963 *POFFSET: MULT_EXPR (T2', C3)
964 *PINDEX: C1 + (C2 * C3) + C4 + (C5 * C3) */
966 static bool
967 restructure_reference (tree *pbase, tree *poffset, widest_int *pindex,
968 tree *ptype)
970 tree base = *pbase, offset = *poffset;
971 widest_int index = *pindex;
972 tree mult_op0, t1, t2, type;
973 widest_int c1, c2, c3, c4, c5;
974 offset_int mem_offset;
976 if (!base
977 || !offset
978 || TREE_CODE (base) != MEM_REF
979 || !mem_ref_offset (base).is_constant (&mem_offset)
980 || TREE_CODE (offset) != MULT_EXPR
981 || TREE_CODE (TREE_OPERAND (offset, 1)) != INTEGER_CST
982 || wi::umod_floor (index, BITS_PER_UNIT) != 0)
983 return false;
985 t1 = TREE_OPERAND (base, 0);
986 c1 = widest_int::from (mem_offset, SIGNED);
987 type = TREE_TYPE (TREE_OPERAND (base, 1));
989 mult_op0 = TREE_OPERAND (offset, 0);
990 c3 = wi::to_widest (TREE_OPERAND (offset, 1));
992 if (TREE_CODE (mult_op0) == PLUS_EXPR)
994 if (TREE_CODE (TREE_OPERAND (mult_op0, 1)) == INTEGER_CST)
996 t2 = TREE_OPERAND (mult_op0, 0);
997 c2 = wi::to_widest (TREE_OPERAND (mult_op0, 1));
999 else
1000 return false;
1002 else if (TREE_CODE (mult_op0) == MINUS_EXPR)
1004 if (TREE_CODE (TREE_OPERAND (mult_op0, 1)) == INTEGER_CST)
1006 t2 = TREE_OPERAND (mult_op0, 0);
1007 c2 = -wi::to_widest (TREE_OPERAND (mult_op0, 1));
1009 else
1010 return false;
1012 else
1014 t2 = mult_op0;
1015 c2 = 0;
1018 c4 = index >> LOG2_BITS_PER_UNIT;
1019 c5 = backtrace_base_for_ref (&t2);
1021 *pbase = t1;
1022 *poffset = fold_build2 (MULT_EXPR, sizetype, fold_convert (sizetype, t2),
1023 wide_int_to_tree (sizetype, c3));
1024 *pindex = c1 + c2 * c3 + c4 + c5 * c3;
1025 *ptype = type;
1027 return true;
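
/* A worked example (hypothetical names): given

     *PBASE   = MEM_REF (p_1, 16)
     *POFFSET = (i_2 + 2) * 8
     *PINDEX  = 32          (a byte offset of 4)

   we have T1 = p_1, C1 = 16, T2 = i_2, C2 = 2, C3 = 8 and C4 = 4.
   Assuming i_2 is not itself a matching CAND_ADD (so C5 = 0), the
   outputs are

     *PBASE   = p_1
     *POFFSET = i_2 * 8
     *PINDEX  = 16 + 2*8 + 4 = 36.  */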
1030 /* Given GS which contains a data reference, create a CAND_REF entry in
1031 the candidate table and attempt to find a basis. */
1033 static void
1034 slsr_process_ref (gimple *gs)
1036 tree ref_expr, base, offset, type;
1037 poly_int64 bitsize, bitpos;
1038 machine_mode mode;
1039 int unsignedp, reversep, volatilep;
1040 slsr_cand_t c;
1042 if (gimple_vdef (gs))
1043 ref_expr = gimple_assign_lhs (gs);
1044 else
1045 ref_expr = gimple_assign_rhs1 (gs);
1047 if (!handled_component_p (ref_expr)
1048 || TREE_CODE (ref_expr) == BIT_FIELD_REF
1049 || (TREE_CODE (ref_expr) == COMPONENT_REF
1050 && DECL_BIT_FIELD (TREE_OPERAND (ref_expr, 1))))
1051 return;
1053 base = get_inner_reference (ref_expr, &bitsize, &bitpos, &offset, &mode,
1054 &unsignedp, &reversep, &volatilep);
1055 HOST_WIDE_INT cbitpos;
1056 if (reversep || !bitpos.is_constant (&cbitpos))
1057 return;
1058 widest_int index = cbitpos;
1060 if (!restructure_reference (&base, &offset, &index, &type))
1061 return;
1063 c = alloc_cand_and_find_basis (CAND_REF, gs, base, index, offset,
1064 type, sizetype, 0);
1066 /* Add the candidate to the statement-candidate mapping. */
1067 add_cand_for_stmt (gs, c);
1070 /* Create a candidate entry for a statement GS, where GS multiplies
1071 two SSA names BASE_IN and STRIDE_IN. Propagate any known information
1072 about the two SSA names into the new candidate. Return the new
1073 candidate. */
1075 static slsr_cand_t
1076 create_mul_ssa_cand (gimple *gs, tree base_in, tree stride_in, bool speed)
1078 tree base = NULL_TREE, stride = NULL_TREE, ctype = NULL_TREE;
1079 tree stype = NULL_TREE;
1080 widest_int index;
1081 unsigned savings = 0;
1082 slsr_cand_t c;
1083 slsr_cand_t base_cand = base_cand_from_table (base_in);
1085 /* Look at all interpretations of the base candidate, if necessary,
1086 to find information to propagate into this candidate. */
1087 while (base_cand && !base && base_cand->kind != CAND_PHI)
1090 if (base_cand->kind == CAND_MULT && integer_onep (base_cand->stride))
1092 /* Y = (B + i') * 1
1093 X = Y * Z
1094 ================
1095 X = (B + i') * Z */
1096 base = base_cand->base_expr;
1097 index = base_cand->index;
1098 stride = stride_in;
1099 ctype = base_cand->cand_type;
1100 stype = TREE_TYPE (stride_in);
1101 if (has_single_use (base_in))
1102 savings = (base_cand->dead_savings
1103 + stmt_cost (base_cand->cand_stmt, speed));
1105 else if (base_cand->kind == CAND_ADD
1106 && TREE_CODE (base_cand->stride) == INTEGER_CST)
1108 /* Y = B + (i' * S), S constant
1109 X = Y * Z
1110 ============================
1111 X = B + ((i' * S) * Z) */
1112 base = base_cand->base_expr;
1113 index = base_cand->index * wi::to_widest (base_cand->stride);
1114 stride = stride_in;
1115 ctype = base_cand->cand_type;
1116 stype = TREE_TYPE (stride_in);
1117 if (has_single_use (base_in))
1118 savings = (base_cand->dead_savings
1119 + stmt_cost (base_cand->cand_stmt, speed));
1122 if (base_cand->next_interp)
1123 base_cand = lookup_cand (base_cand->next_interp);
1124 else
1125 base_cand = NULL;
1128 if (!base)
1130 /* No interpretations had anything useful to propagate, so
1131 produce X = (Y + 0) * Z. */
1132 base = base_in;
1133 index = 0;
1134 stride = stride_in;
1135 ctype = TREE_TYPE (base_in);
1136 stype = TREE_TYPE (stride_in);
1139 c = alloc_cand_and_find_basis (CAND_MULT, gs, base, index, stride,
1140 ctype, stype, savings);
1141 return c;
1144 /* Create a candidate entry for a statement GS, where GS multiplies
1145 SSA name BASE_IN by constant STRIDE_IN. Propagate any known
1146 information about BASE_IN into the new candidate. Return the new
1147 candidate. */
1149 static slsr_cand_t
1150 create_mul_imm_cand (gimple *gs, tree base_in, tree stride_in, bool speed)
1152 tree base = NULL_TREE, stride = NULL_TREE, ctype = NULL_TREE;
1153 widest_int index, temp;
1154 unsigned savings = 0;
1155 slsr_cand_t c;
1156 slsr_cand_t base_cand = base_cand_from_table (base_in);
1158 /* Look at all interpretations of the base candidate, if necessary,
1159 to find information to propagate into this candidate. */
1160 while (base_cand && !base && base_cand->kind != CAND_PHI)
1162 if (base_cand->kind == CAND_MULT
1163 && TREE_CODE (base_cand->stride) == INTEGER_CST)
1165 /* Y = (B + i') * S, S constant
1166 X = Y * c
1167 ============================
1168 X = (B + i') * (S * c) */
1169 temp = wi::to_widest (base_cand->stride) * wi::to_widest (stride_in);
1170 if (wi::fits_to_tree_p (temp, TREE_TYPE (stride_in)))
1172 base = base_cand->base_expr;
1173 index = base_cand->index;
1174 stride = wide_int_to_tree (TREE_TYPE (stride_in), temp);
1175 ctype = base_cand->cand_type;
1176 if (has_single_use (base_in))
1177 savings = (base_cand->dead_savings
1178 + stmt_cost (base_cand->cand_stmt, speed));
1181 else if (base_cand->kind == CAND_ADD && integer_onep (base_cand->stride))
1183 /* Y = B + (i' * 1)
1184 X = Y * c
1185 ===========================
1186 X = (B + i') * c */
1187 base = base_cand->base_expr;
1188 index = base_cand->index;
1189 stride = stride_in;
1190 ctype = base_cand->cand_type;
1191 if (has_single_use (base_in))
1192 savings = (base_cand->dead_savings
1193 + stmt_cost (base_cand->cand_stmt, speed));
1195 else if (base_cand->kind == CAND_ADD
1196 && base_cand->index == 1
1197 && TREE_CODE (base_cand->stride) == INTEGER_CST)
1199 /* Y = B + (1 * S), S constant
1200 X = Y * c
1201 ===========================
1202 X = (B + S) * c */
1203 base = base_cand->base_expr;
1204 index = wi::to_widest (base_cand->stride);
1205 stride = stride_in;
1206 ctype = base_cand->cand_type;
1207 if (has_single_use (base_in))
1208 savings = (base_cand->dead_savings
1209 + stmt_cost (base_cand->cand_stmt, speed));
1212 if (base_cand->next_interp)
1213 base_cand = lookup_cand (base_cand->next_interp);
1214 else
1215 base_cand = NULL;
1218 if (!base)
1220 /* No interpretations had anything useful to propagate, so
1221 produce X = (Y + 0) * c. */
1222 base = base_in;
1223 index = 0;
1224 stride = stride_in;
1225 ctype = TREE_TYPE (base_in);
1228 c = alloc_cand_and_find_basis (CAND_MULT, gs, base, index, stride,
1229 ctype, sizetype, savings);
1230 return c;
1233 /* Given GS which is a multiply of scalar integers, make an appropriate
1234 entry in the candidate table. If this is a multiply of two SSA names,
1235 create two CAND_MULT interpretations and attempt to find a basis for
1236 each of them. Otherwise, create a single CAND_MULT and attempt to
1237 find a basis. */
1239 static void
1240 slsr_process_mul (gimple *gs, tree rhs1, tree rhs2, bool speed)
1242 slsr_cand_t c, c2;
1244 /* If this is a multiply of an SSA name with itself, it is highly
1245 unlikely that we will get a strength reduction opportunity, so
1246 don't record it as a candidate. This simplifies the logic for
1247 finding a basis, so if this is removed that must be considered. */
1248 if (rhs1 == rhs2)
1249 return;
1251 if (TREE_CODE (rhs2) == SSA_NAME)
1253 /* Record an interpretation of this statement in the candidate table
1254 assuming RHS1 is the base expression and RHS2 is the stride. */
1255 c = create_mul_ssa_cand (gs, rhs1, rhs2, speed);
1257 /* Add the first interpretation to the statement-candidate mapping. */
1258 add_cand_for_stmt (gs, c);
1260 /* Record another interpretation of this statement assuming RHS1
1261 is the stride and RHS2 is the base expression. */
1262 c2 = create_mul_ssa_cand (gs, rhs2, rhs1, speed);
1263 c->next_interp = c2->cand_num;
1265 else if (TREE_CODE (rhs2) == INTEGER_CST)
1267 /* Record an interpretation for the multiply-immediate. */
1268 c = create_mul_imm_cand (gs, rhs1, rhs2, speed);
1270 /* Add the interpretation to the statement-candidate mapping. */
1271 add_cand_for_stmt (gs, c);
1275 /* Create a candidate entry for a statement GS, where GS adds two
1276 SSA names BASE_IN and ADDEND_IN if SUBTRACT_P is false, and
1277 subtracts ADDEND_IN from BASE_IN otherwise. Propagate any known
1278 information about the two SSA names into the new candidate.
1279 Return the new candidate. */
1281 static slsr_cand_t
1282 create_add_ssa_cand (gimple *gs, tree base_in, tree addend_in,
1283 bool subtract_p, bool speed)
1285 tree base = NULL_TREE, stride = NULL_TREE, ctype = NULL_TREE;
1286 tree stype = NULL_TREE;
1287 widest_int index;
1288 unsigned savings = 0;
1289 slsr_cand_t c;
1290 slsr_cand_t base_cand = base_cand_from_table (base_in);
1291 slsr_cand_t addend_cand = base_cand_from_table (addend_in);
1293 /* The most useful transformation is a multiply-immediate feeding
1294 an add or subtract. Look for that first. */
1295 while (addend_cand && !base && addend_cand->kind != CAND_PHI)
1297 if (addend_cand->kind == CAND_MULT
1298 && addend_cand->index == 0
1299 && TREE_CODE (addend_cand->stride) == INTEGER_CST)
1301 /* Z = (B + 0) * S, S constant
1302 X = Y +/- Z
1303 ===========================
1304 X = Y + ((+/-1 * S) * B) */
1305 base = base_in;
1306 index = wi::to_widest (addend_cand->stride);
1307 if (subtract_p)
1308 index = -index;
1309 stride = addend_cand->base_expr;
1310 ctype = TREE_TYPE (base_in);
1311 stype = addend_cand->cand_type;
1312 if (has_single_use (addend_in))
1313 savings = (addend_cand->dead_savings
1314 + stmt_cost (addend_cand->cand_stmt, speed));
1317 if (addend_cand->next_interp)
1318 addend_cand = lookup_cand (addend_cand->next_interp);
1319 else
1320 addend_cand = NULL;
1323 while (base_cand && !base && base_cand->kind != CAND_PHI)
1325 if (base_cand->kind == CAND_ADD
1326 && (base_cand->index == 0
1327 || operand_equal_p (base_cand->stride,
1328 integer_zero_node, 0)))
1330 /* Y = B + (i' * S), i' * S = 0
1331 X = Y +/- Z
1332 ============================
1333 X = B + (+/-1 * Z) */
1334 base = base_cand->base_expr;
1335 index = subtract_p ? -1 : 1;
1336 stride = addend_in;
1337 ctype = base_cand->cand_type;
1338 stype = (TREE_CODE (addend_in) == INTEGER_CST ? sizetype
1339 : TREE_TYPE (addend_in));
1340 if (has_single_use (base_in))
1341 savings = (base_cand->dead_savings
1342 + stmt_cost (base_cand->cand_stmt, speed));
1344 else if (subtract_p)
1346 slsr_cand_t subtrahend_cand = base_cand_from_table (addend_in);
1348 while (subtrahend_cand && !base && subtrahend_cand->kind != CAND_PHI)
1350 if (subtrahend_cand->kind == CAND_MULT
1351 && subtrahend_cand->index == 0
1352 && TREE_CODE (subtrahend_cand->stride) == INTEGER_CST)
1354 /* Z = (B + 0) * S, S constant
1355 X = Y - Z
1356 ===========================
1357 Value: X = Y + ((-1 * S) * B) */
1358 base = base_in;
1359 index = wi::to_widest (subtrahend_cand->stride);
1360 index = -index;
1361 stride = subtrahend_cand->base_expr;
1362 ctype = TREE_TYPE (base_in);
1363 stype = subtrahend_cand->cand_type;
1364 if (has_single_use (addend_in))
1365 savings = (subtrahend_cand->dead_savings
1366 + stmt_cost (subtrahend_cand->cand_stmt, speed));
1369 if (subtrahend_cand->next_interp)
1370 subtrahend_cand = lookup_cand (subtrahend_cand->next_interp);
1371 else
1372 subtrahend_cand = NULL;
1376 if (base_cand->next_interp)
1377 base_cand = lookup_cand (base_cand->next_interp);
1378 else
1379 base_cand = NULL;
1382 if (!base)
1384 /* No interpretations had anything useful to propagate, so
1385 produce X = Y + (1 * Z). */
1386 base = base_in;
1387 index = subtract_p ? -1 : 1;
1388 stride = addend_in;
1389 ctype = TREE_TYPE (base_in);
1390 stype = (TREE_CODE (addend_in) == INTEGER_CST ? sizetype
1391 : TREE_TYPE (addend_in));
1394 c = alloc_cand_and_find_basis (CAND_ADD, gs, base, index, stride,
1395 ctype, stype, savings);
1396 return c;
1399 /* Create a candidate entry for a statement GS, where GS adds SSA
1400 name BASE_IN to constant INDEX_IN. Propagate any known information
1401 about BASE_IN into the new candidate. Return the new candidate. */
1403 static slsr_cand_t
1404 create_add_imm_cand (gimple *gs, tree base_in, const widest_int &index_in,
1405 bool speed)
1407 enum cand_kind kind = CAND_ADD;
1408 tree base = NULL_TREE, stride = NULL_TREE, ctype = NULL_TREE;
1409 tree stype = NULL_TREE;
1410 widest_int index, multiple;
1411 unsigned savings = 0;
1412 slsr_cand_t c;
1413 slsr_cand_t base_cand = base_cand_from_table (base_in);
1415 while (base_cand && !base && base_cand->kind != CAND_PHI)
1417 signop sign = TYPE_SIGN (TREE_TYPE (base_cand->stride));
1419 if (TREE_CODE (base_cand->stride) == INTEGER_CST
1420 && wi::multiple_of_p (index_in, wi::to_widest (base_cand->stride),
1421 sign, &multiple))
1423 /* Y = (B + i') * S, S constant, c = kS for some integer k
1424 X = Y + c
1425 ============================
1426 X = (B + (i'+ k)) * S
1428 Y = B + (i' * S), S constant, c = kS for some integer k
1429 X = Y + c
1430 ============================
1431 X = (B + (i'+ k)) * S */
1432 kind = base_cand->kind;
1433 base = base_cand->base_expr;
1434 index = base_cand->index + multiple;
1435 stride = base_cand->stride;
1436 ctype = base_cand->cand_type;
1437 stype = base_cand->stride_type;
1438 if (has_single_use (base_in))
1439 savings = (base_cand->dead_savings
1440 + stmt_cost (base_cand->cand_stmt, speed));
1443 if (base_cand->next_interp)
1444 base_cand = lookup_cand (base_cand->next_interp);
1445 else
1446 base_cand = NULL;
1449 if (!base)
1451 /* No interpretations had anything useful to propagate, so
1452 produce X = Y + (c * 1). */
1453 kind = CAND_ADD;
1454 base = base_in;
1455 index = index_in;
1456 stride = integer_one_node;
1457 ctype = TREE_TYPE (base_in);
1458 stype = sizetype;
1461 c = alloc_cand_and_find_basis (kind, gs, base, index, stride,
1462 ctype, stype, savings);
1463 return c;
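
/* A worked example (hypothetical names): if y_1 is recorded as the
   CAND_MULT y_1 = (b_2 + 2) * 4 and we process x_3 = y_1 + 8, then 8
   is the multiple 2 * 4, so x_3 is recorded as the CAND_MULT
   x_3 = (b_2 + 4) * 4: kind, base and stride are inherited from y_1
   and only the index is bumped by k = 2.  */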
1466 /* Given GS which is an add or subtract of scalar integers or pointers,
1467 make at least one appropriate entry in the candidate table. */
1469 static void
1470 slsr_process_add (gimple *gs, tree rhs1, tree rhs2, bool speed)
1472 bool subtract_p = gimple_assign_rhs_code (gs) == MINUS_EXPR;
1473 slsr_cand_t c = NULL, c2;
1475 if (TREE_CODE (rhs2) == SSA_NAME)
1477 /* First record an interpretation assuming RHS1 is the base expression
1478 and RHS2 is the stride. But it doesn't make sense for the
1479 stride to be a pointer, so don't record a candidate in that case. */
1480 if (!POINTER_TYPE_P (TREE_TYPE (rhs2)))
1482 c = create_add_ssa_cand (gs, rhs1, rhs2, subtract_p, speed);
1484 /* Add the first interpretation to the statement-candidate
1485 mapping. */
1486 add_cand_for_stmt (gs, c);
1489 /* If the two RHS operands are identical, or this is a subtract,
1490 we're done. */
1491 if (operand_equal_p (rhs1, rhs2, 0) || subtract_p)
1492 return;
1494 /* Otherwise, record another interpretation assuming RHS2 is the
1495 base expression and RHS1 is the stride, again provided that the
1496 stride is not a pointer. */
1497 if (!POINTER_TYPE_P (TREE_TYPE (rhs1)))
1499 c2 = create_add_ssa_cand (gs, rhs2, rhs1, false, speed);
1500 if (c)
1501 c->next_interp = c2->cand_num;
1502 else
1503 add_cand_for_stmt (gs, c2);
1506 else if (TREE_CODE (rhs2) == INTEGER_CST)
1508 /* Record an interpretation for the add-immediate. */
1509 widest_int index = wi::to_widest (rhs2);
1510 if (subtract_p)
1511 index = -index;
1513 c = create_add_imm_cand (gs, rhs1, index, speed);
1515 /* Add the interpretation to the statement-candidate mapping. */
1516 add_cand_for_stmt (gs, c);
1520 /* Given GS which is a negate of a scalar integer, make an appropriate
1521 entry in the candidate table. A negate is equivalent to a multiply
1522 by -1. */
1524 static void
1525 slsr_process_neg (gimple *gs, tree rhs1, bool speed)
1527 /* Record a CAND_MULT interpretation for the multiply by -1. */
1528 slsr_cand_t c = create_mul_imm_cand (gs, rhs1, integer_minus_one_node, speed);
1530 /* Add the interpretation to the statement-candidate mapping. */
1531 add_cand_for_stmt (gs, c);
1534 /* Helper function for legal_cast_p, operating on two trees. Checks
1535 whether it's allowable to cast from RHS to LHS. See legal_cast_p
1536 for more details. */
1538 static bool
1539 legal_cast_p_1 (tree lhs_type, tree rhs_type)
1541 unsigned lhs_size, rhs_size;
1542 bool lhs_wraps, rhs_wraps;
1544 lhs_size = TYPE_PRECISION (lhs_type);
1545 rhs_size = TYPE_PRECISION (rhs_type);
1546 lhs_wraps = ANY_INTEGRAL_TYPE_P (lhs_type) && TYPE_OVERFLOW_WRAPS (lhs_type);
1547 rhs_wraps = ANY_INTEGRAL_TYPE_P (rhs_type) && TYPE_OVERFLOW_WRAPS (rhs_type);
1549 if (lhs_size < rhs_size
1550 || (rhs_wraps && !lhs_wraps)
1551 || (rhs_wraps && lhs_wraps && rhs_size != lhs_size))
1552 return false;
1554 return true;
1557 /* Return TRUE if GS is a statement that defines an SSA name from
1558 a conversion and is legal for us to combine with an add and multiply
1559 in the candidate table. For example, suppose we have:
1561 A = B + i;
1562 C = (type) A;
1563 D = C * S;
1565 Without the type-cast, we would create a CAND_MULT for D with base B,
1566 index i, and stride S. We want to record this candidate only if it
1567 is equivalent to applying the type cast following the multiply:
1569 A = B + i;
1570 E = A * S;
1571 D = (type) E;
1573 We will record the type with the candidate for D. This allows us
1574 to use a similar previous candidate as a basis. If we have earlier seen
1576 A' = B + i';
1577 C' = (type) A';
1578 D' = C' * S;
1580 we can replace D with
1582 D = D' + (i - i') * S;
1584 But if moving the type-cast would change semantics, we mustn't do this.
1586 This is legitimate for casts from a non-wrapping integral type to
1587 any integral type of the same or larger size. It is not legitimate
1588 to convert a wrapping type to a non-wrapping type, or to a wrapping
1589 type of a different size. I.e., with a wrapping type, we must
1590 assume that the addition B + i could wrap, in which case performing
1591 the multiply before or after one of the "illegal" type casts will
1592 have different semantics. */
1594 static bool
1595 legal_cast_p (gimple *gs, tree rhs)
1597 if (!is_gimple_assign (gs)
1598 || !CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (gs)))
1599 return false;
1601 return legal_cast_p_1 (TREE_TYPE (gimple_assign_lhs (gs)), TREE_TYPE (rhs));
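
/* Concretely, on a typical LP64 target with the default -fno-wrapv
   (signed types do not wrap, unsigned types do): casting int to long
   is legal (widening from a non-wrapping type); casting long to int is
   not (narrowing); casting unsigned int to long is not (a wrapping
   type converted to a non-wrapping one); and casting unsigned int to
   unsigned long is not (two wrapping types of different sizes).  */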
1604 /* Given GS which is a cast to a scalar integer type, determine whether
1605 the cast is legal for strength reduction. If so, make at least one
1606 appropriate entry in the candidate table. */
1608 static void
1609 slsr_process_cast (gimple *gs, tree rhs1, bool speed)
1611 tree lhs, ctype;
1612 slsr_cand_t base_cand, c = NULL, c2;
1613 unsigned savings = 0;
1615 if (!legal_cast_p (gs, rhs1))
1616 return;
1618 lhs = gimple_assign_lhs (gs);
1619 base_cand = base_cand_from_table (rhs1);
1620 ctype = TREE_TYPE (lhs);
1622 if (base_cand && base_cand->kind != CAND_PHI)
1624 while (base_cand)
1626 /* Propagate all data from the base candidate except the type,
1627 which comes from the cast, and the base candidate's cast,
1628 which is no longer applicable. */
1629 if (has_single_use (rhs1))
1630 savings = (base_cand->dead_savings
1631 + stmt_cost (base_cand->cand_stmt, speed));
1633 c = alloc_cand_and_find_basis (base_cand->kind, gs,
1634 base_cand->base_expr,
1635 base_cand->index, base_cand->stride,
1636 ctype, base_cand->stride_type,
1637 savings);
1638 if (base_cand->next_interp)
1639 base_cand = lookup_cand (base_cand->next_interp);
1640 else
1641 base_cand = NULL;
1644 else
1646 /* If nothing is known about the RHS, create fresh CAND_ADD and
1647 CAND_MULT interpretations:
1649 X = Y + (0 * 1)
1650 X = (Y + 0) * 1
1652 The first of these is somewhat arbitrary, but the choice of
1653 1 for the stride simplifies the logic for propagating casts
1654 into their uses. */
1655 c = alloc_cand_and_find_basis (CAND_ADD, gs, rhs1, 0,
1656 integer_one_node, ctype, sizetype, 0);
1657 c2 = alloc_cand_and_find_basis (CAND_MULT, gs, rhs1, 0,
1658 integer_one_node, ctype, sizetype, 0);
1659 c->next_interp = c2->cand_num;
1662 /* Add the first (or only) interpretation to the statement-candidate
1663 mapping. */
1664 add_cand_for_stmt (gs, c);
1667 /* Given GS which is a copy of a scalar integer type, make at least one
1668 appropriate entry in the candidate table.
1670 This interface is included for completeness, but is unnecessary
1671 if this pass immediately follows a pass that performs copy
1672 propagation, such as DOM. */
1674 static void
1675 slsr_process_copy (gimple *gs, tree rhs1, bool speed)
1677 slsr_cand_t base_cand, c = NULL, c2;
1678 unsigned savings = 0;
1680 base_cand = base_cand_from_table (rhs1);
1682 if (base_cand && base_cand->kind != CAND_PHI)
1684 while (base_cand)
1686 /* Propagate all data from the base candidate. */
1687 if (has_single_use (rhs1))
1688 savings = (base_cand->dead_savings
1689 + stmt_cost (base_cand->cand_stmt, speed));
1691 c = alloc_cand_and_find_basis (base_cand->kind, gs,
1692 base_cand->base_expr,
1693 base_cand->index, base_cand->stride,
1694 base_cand->cand_type,
1695 base_cand->stride_type, savings);
1696 if (base_cand->next_interp)
1697 base_cand = lookup_cand (base_cand->next_interp);
1698 else
1699 base_cand = NULL;
1702 else
1704 /* If nothing is known about the RHS, create fresh CAND_ADD and
1705 CAND_MULT interpretations:
1707 X = Y + (0 * 1)
1708 X = (Y + 0) * 1
1710 The first of these is somewhat arbitrary, but the choice of
1711 1 for the stride simplifies the logic for propagating casts
1712 into their uses. */
1713 c = alloc_cand_and_find_basis (CAND_ADD, gs, rhs1, 0,
1714 integer_one_node, TREE_TYPE (rhs1),
1715 sizetype, 0);
1716 c2 = alloc_cand_and_find_basis (CAND_MULT, gs, rhs1, 0,
1717 integer_one_node, TREE_TYPE (rhs1),
1718 sizetype, 0);
1719 c->next_interp = c2->cand_num;
1722 /* Add the first (or only) interpretation to the statement-candidate
1723 mapping. */
1724 add_cand_for_stmt (gs, c);
1727 class find_candidates_dom_walker : public dom_walker
1729 public:
1730 find_candidates_dom_walker (cdi_direction direction)
1731 : dom_walker (direction) {}
1732 virtual edge before_dom_children (basic_block);
1735 /* Find strength-reduction candidates in block BB. */
1737 edge
1738 find_candidates_dom_walker::before_dom_children (basic_block bb)
1740 bool speed = optimize_bb_for_speed_p (bb);
1742 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
1743 gsi_next (&gsi))
1744 slsr_process_phi (gsi.phi (), speed);
1746 for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
1747 gsi_next (&gsi))
1749 gimple *gs = gsi_stmt (gsi);
1751 if (stmt_could_throw_p (gs))
1752 continue;
1754 if (gimple_vuse (gs) && gimple_assign_single_p (gs))
1755 slsr_process_ref (gs);
1757 else if (is_gimple_assign (gs)
1758 && (INTEGRAL_TYPE_P (TREE_TYPE (gimple_assign_lhs (gs)))
1759 || POINTER_TYPE_P (TREE_TYPE (gimple_assign_lhs (gs)))))
1761 tree rhs1 = NULL_TREE, rhs2 = NULL_TREE;
1763 switch (gimple_assign_rhs_code (gs))
1765 case MULT_EXPR:
1766 case PLUS_EXPR:
1767 rhs1 = gimple_assign_rhs1 (gs);
1768 rhs2 = gimple_assign_rhs2 (gs);
1769 /* Should never happen, but currently some buggy situations
1770 in earlier phases put constants in rhs1. */
1771 if (TREE_CODE (rhs1) != SSA_NAME)
1772 continue;
1773 break;
1775 /* Possible future opportunity: rhs1 of a ptr+ can be
1776 an ADDR_EXPR. */
1777 case POINTER_PLUS_EXPR:
1778 case MINUS_EXPR:
1779 rhs2 = gimple_assign_rhs2 (gs);
1780 gcc_fallthrough ();
1782 CASE_CONVERT:
1783 case SSA_NAME:
1784 case NEGATE_EXPR:
1785 rhs1 = gimple_assign_rhs1 (gs);
1786 if (TREE_CODE (rhs1) != SSA_NAME)
1787 continue;
1788 break;
1790 default:
1794 switch (gimple_assign_rhs_code (gs))
1796 case MULT_EXPR:
1797 slsr_process_mul (gs, rhs1, rhs2, speed);
1798 break;
1800 case PLUS_EXPR:
1801 case POINTER_PLUS_EXPR:
1802 case MINUS_EXPR:
1803 slsr_process_add (gs, rhs1, rhs2, speed);
1804 break;
1806 case NEGATE_EXPR:
1807 slsr_process_neg (gs, rhs1, speed);
1808 break;
1810 CASE_CONVERT:
1811 slsr_process_cast (gs, rhs1, speed);
1812 break;
1814 case SSA_NAME:
1815 slsr_process_copy (gs, rhs1, speed);
1816 break;
1818 default:
1823 return NULL;
1826 /* Dump a candidate for debug. */
1828 static void
1829 dump_candidate (slsr_cand_t c)
1831 fprintf (dump_file, "%3d [%d] ", c->cand_num,
1832 gimple_bb (c->cand_stmt)->index);
1833 print_gimple_stmt (dump_file, c->cand_stmt, 0);
1834 switch (c->kind)
1836 case CAND_MULT:
1837 fputs (" MULT : (", dump_file);
1838 print_generic_expr (dump_file, c->base_expr);
1839 fputs (" + ", dump_file);
1840 print_decs (c->index, dump_file);
1841 fputs (") * ", dump_file);
1842 if (TREE_CODE (c->stride) != INTEGER_CST
1843 && c->stride_type != TREE_TYPE (c->stride))
1845 fputs ("(", dump_file);
1846 print_generic_expr (dump_file, c->stride_type);
1847 fputs (")", dump_file);
1849 print_generic_expr (dump_file, c->stride);
1850 fputs (" : ", dump_file);
1851 break;
1852 case CAND_ADD:
1853 fputs (" ADD : ", dump_file);
1854 print_generic_expr (dump_file, c->base_expr);
1855 fputs (" + (", dump_file);
1856 print_decs (c->index, dump_file);
1857 fputs (" * ", dump_file);
1858 if (TREE_CODE (c->stride) != INTEGER_CST
1859 && c->stride_type != TREE_TYPE (c->stride))
1861 fputs ("(", dump_file);
1862 print_generic_expr (dump_file, c->stride_type);
1863 fputs (")", dump_file);
1865 print_generic_expr (dump_file, c->stride);
1866 fputs (") : ", dump_file);
1867 break;
1868 case CAND_REF:
1869 fputs (" REF : ", dump_file);
1870 print_generic_expr (dump_file, c->base_expr);
1871 fputs (" + (", dump_file);
1872 print_generic_expr (dump_file, c->stride);
1873 fputs (") + ", dump_file);
1874 print_decs (c->index, dump_file);
1875 fputs (" : ", dump_file);
1876 break;
1877 case CAND_PHI:
1878 fputs (" PHI : ", dump_file);
1879 print_generic_expr (dump_file, c->base_expr);
1880 fputs (" + (unknown * ", dump_file);
1881 print_generic_expr (dump_file, c->stride);
1882 fputs (") : ", dump_file);
1883 break;
1884 default:
1885 gcc_unreachable ();
1887 print_generic_expr (dump_file, c->cand_type);
1888 fprintf (dump_file, "\n basis: %d dependent: %d sibling: %d\n",
1889 c->basis, c->dependent, c->sibling);
1890 fprintf (dump_file, " next-interp: %d dead-savings: %d\n",
1891 c->next_interp, c->dead_savings);
1892 if (c->def_phi)
1893 fprintf (dump_file, " phi: %d\n", c->def_phi);
1894 fputs ("\n", dump_file);
1897 /* Dump the candidate vector for debug. */
1899 static void
1900 dump_cand_vec (void)
1902 unsigned i;
1903 slsr_cand_t c;
1905 fprintf (dump_file, "\nStrength reduction candidate vector:\n\n");
1907 FOR_EACH_VEC_ELT (cand_vec, i, c)
1908 dump_candidate (c);
1911 /* Callback used to dump the candidate chains hash table. */
1913 int
1914 ssa_base_cand_dump_callback (cand_chain **slot, void *ignored ATTRIBUTE_UNUSED)
1916 const_cand_chain_t chain = *slot;
1917 cand_chain_t p;
1919 print_generic_expr (dump_file, chain->base_expr);
1920 fprintf (dump_file, " -> %d", chain->cand->cand_num);
1922 for (p = chain->next; p; p = p->next)
1923 fprintf (dump_file, " -> %d", p->cand->cand_num);
1925 fputs ("\n", dump_file);
1926 return 1;
1929 /* Dump the candidate chains. */
1931 static void
1932 dump_cand_chains (void)
1934 fprintf (dump_file, "\nStrength reduction candidate chains:\n\n");
1935 base_cand_map->traverse_noresize <void *, ssa_base_cand_dump_callback>
1936 (NULL);
1937 fputs ("\n", dump_file);
1940 /* Dump the increment vector for debug. */
1942 static void
1943 dump_incr_vec (void)
1945 if (dump_file && (dump_flags & TDF_DETAILS))
1947 unsigned i;
1949 fprintf (dump_file, "\nIncrement vector:\n\n");
1951 for (i = 0; i < incr_vec_len; i++)
1953 fprintf (dump_file, "%3d increment: ", i);
1954 print_decs (incr_vec[i].incr, dump_file);
1955 fprintf (dump_file, "\n count: %d", incr_vec[i].count);
1956 fprintf (dump_file, "\n cost: %d", incr_vec[i].cost);
1957 fputs ("\n initializer: ", dump_file);
1958 print_generic_expr (dump_file, incr_vec[i].initializer);
1959 fputs ("\n\n", dump_file);
1964 /* Replace *EXPR in candidate C with an equivalent strength-reduced
1965 data reference. */
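/* As a rough sketch (hypothetical GIMPLE), a reference whose address
   decomposes into base B, stride S, and constant index 40 is rewritten
   into a flat

     MEM[(T *)B + S + 40]

   i.e. a MEM_REF whose address is B POINTER_PLUS S, with offset 40.  */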
1967 static void
1968 replace_ref (tree *expr, slsr_cand_t c)
1970 tree add_expr, mem_ref, acc_type = TREE_TYPE (*expr);
1971 unsigned HOST_WIDE_INT misalign;
1972 unsigned align;
1974 /* Ensure the memory reference carries the minimum alignment
1975 requirement for the data type. See PR58041. */
1976 get_object_alignment_1 (*expr, &align, &misalign);
1977 if (misalign != 0)
1978 align = least_bit_hwi (misalign);
1979 if (align < TYPE_ALIGN (acc_type))
1980 acc_type = build_aligned_type (acc_type, align);
1982 add_expr = fold_build2 (POINTER_PLUS_EXPR, c->cand_type,
1983 c->base_expr, c->stride);
1984 mem_ref = fold_build2 (MEM_REF, acc_type, add_expr,
1985 wide_int_to_tree (c->cand_type, c->index));
1987 /* Gimplify the base addressing expression for the new MEM_REF tree. */
1988 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
1989 TREE_OPERAND (mem_ref, 0)
1990 = force_gimple_operand_gsi (&gsi, TREE_OPERAND (mem_ref, 0),
1991 /*simple_p=*/true, NULL,
1992 /*before=*/true, GSI_SAME_STMT);
1993 copy_ref_info (mem_ref, *expr);
1994 *expr = mem_ref;
1995 update_stmt (c->cand_stmt);
1998 /* Replace CAND_REF candidate C, each sibling of candidate C, and each
1999 dependent of candidate C with an equivalent strength-reduced data
2000 reference. */
2002 static void
2003 replace_refs (slsr_cand_t c)
2005 if (dump_file && (dump_flags & TDF_DETAILS))
2007 fputs ("Replacing reference: ", dump_file);
2008 print_gimple_stmt (dump_file, c->cand_stmt, 0);
2011 if (gimple_vdef (c->cand_stmt))
2013 tree *lhs = gimple_assign_lhs_ptr (c->cand_stmt);
2014 replace_ref (lhs, c);
2016 else
2018 tree *rhs = gimple_assign_rhs1_ptr (c->cand_stmt);
2019 replace_ref (rhs, c);
2022 if (dump_file && (dump_flags & TDF_DETAILS))
2024 fputs ("With: ", dump_file);
2025 print_gimple_stmt (dump_file, c->cand_stmt, 0);
2026 fputs ("\n", dump_file);
2029 if (c->sibling)
2030 replace_refs (lookup_cand (c->sibling));
2032 if (c->dependent)
2033 replace_refs (lookup_cand (c->dependent));
2036 /* Return TRUE if candidate C is dependent upon a PHI. */
2038 static bool
2039 phi_dependent_cand_p (slsr_cand_t c)
2041 /* A candidate is not necessarily dependent upon a PHI just because
2042 it has a phi definition for its base name. It may have a basis
2043 that relies upon the same phi definition, in which case the PHI
2044 is irrelevant to this candidate. */
2045 return (c->def_phi
2046 && c->basis
2047 && lookup_cand (c->basis)->def_phi != c->def_phi);
2050 /* Calculate the increment required for candidate C relative to
2051 its basis. */
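/* For example, if C is interpreted as (b_2 + 7) * s_5 and its basis is
   interpreted as (b_2 + 2) * s_5, the increment returned is 7 - 2 = 5.  */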
2053 static widest_int
2054 cand_increment (slsr_cand_t c)
2056 slsr_cand_t basis;
2058 /* If the candidate doesn't have a basis, just return its own
2059 index. This is useful in record_increments to help us find
2060 an existing initializer. Also, if the candidate's basis is
2061 hidden by a phi, then its own index will be the increment
2062 from the newly introduced phi basis. */
2063 if (!c->basis || phi_dependent_cand_p (c))
2064 return c->index;
2066 basis = lookup_cand (c->basis);
2067 gcc_assert (operand_equal_p (c->base_expr, basis->base_expr, 0));
2068 return c->index - basis->index;
2071 /* Calculate the increment required for candidate C relative to
2072 its basis. If we aren't going to generate pointer arithmetic
2073 for this candidate, return the absolute value of that increment
2074 instead. */
2076 static inline widest_int
2077 cand_abs_increment (slsr_cand_t c)
2079 widest_int increment = cand_increment (c);
2081 if (!address_arithmetic_p && wi::neg_p (increment))
2082 increment = -increment;
2084 return increment;
2087 /* Return TRUE iff candidate C has already been replaced under
2088 another interpretation. */
2090 static inline bool
2091 cand_already_replaced (slsr_cand_t c)
2093 return (gimple_bb (c->cand_stmt) == 0);
2096 /* Common logic used by replace_unconditional_candidate and
2097 replace_conditional_candidate. */
2099 static void
2100 replace_mult_candidate (slsr_cand_t c, tree basis_name, widest_int bump)
2102 tree target_type = TREE_TYPE (gimple_assign_lhs (c->cand_stmt));
2103 enum tree_code cand_code = gimple_assign_rhs_code (c->cand_stmt);
2105 /* It is not useful to replace casts, copies, negates, or adds and
2106 subtracts of an SSA name and a constant. */
2107 if (cand_code == SSA_NAME
2108 || CONVERT_EXPR_CODE_P (cand_code)
2109 || cand_code == PLUS_EXPR
2110 || cand_code == POINTER_PLUS_EXPR
2111 || cand_code == MINUS_EXPR
2112 || cand_code == NEGATE_EXPR)
2113 return;
2115 enum tree_code code = PLUS_EXPR;
2116 tree bump_tree;
2117 gimple *stmt_to_print = NULL;
2119 if (wi::neg_p (bump))
2121 code = MINUS_EXPR;
2122 bump = -bump;
2125 /* It is possible that the resulting bump doesn't fit in target_type.
2126 Abandon the replacement in this case. This does not affect
2127 siblings or dependents of C. */
2128 if (bump != wi::ext (bump, TYPE_PRECISION (target_type),
2129 TYPE_SIGN (target_type)))
2130 return;
2132 bump_tree = wide_int_to_tree (target_type, bump);
2134 /* If the basis name and the candidate's LHS have incompatible types,
2135 introduce a cast. */
2136 if (!useless_type_conversion_p (target_type, TREE_TYPE (basis_name)))
2137 basis_name = introduce_cast_before_cand (c, target_type, basis_name);
2139 if (dump_file && (dump_flags & TDF_DETAILS))
2141 fputs ("Replacing: ", dump_file);
2142 print_gimple_stmt (dump_file, c->cand_stmt, 0);
2145 if (bump == 0)
2147 tree lhs = gimple_assign_lhs (c->cand_stmt);
2148 gassign *copy_stmt = gimple_build_assign (lhs, basis_name);
2149 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
2150 slsr_cand_t cc = c;
2151 gimple_set_location (copy_stmt, gimple_location (c->cand_stmt));
2152 gsi_replace (&gsi, copy_stmt, false);
2153 c->cand_stmt = copy_stmt;
2154 while (cc->next_interp)
2156 cc = lookup_cand (cc->next_interp);
2157 cc->cand_stmt = copy_stmt;
2159 if (dump_file && (dump_flags & TDF_DETAILS))
2160 stmt_to_print = copy_stmt;
2162 else
2164 tree rhs1, rhs2;
2165 if (cand_code != NEGATE_EXPR) {
2166 rhs1 = gimple_assign_rhs1 (c->cand_stmt);
2167 rhs2 = gimple_assign_rhs2 (c->cand_stmt);
2169 if (cand_code != NEGATE_EXPR
2170 && ((operand_equal_p (rhs1, basis_name, 0)
2171 && operand_equal_p (rhs2, bump_tree, 0))
2172 || (operand_equal_p (rhs1, bump_tree, 0)
2173 && operand_equal_p (rhs2, basis_name, 0))))
2175 if (dump_file && (dump_flags & TDF_DETAILS))
2177 fputs ("(duplicate, not actually replacing)", dump_file);
2178 stmt_to_print = c->cand_stmt;
2181 else
2183 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
2184 slsr_cand_t cc = c;
2185 gimple_assign_set_rhs_with_ops (&gsi, code, basis_name, bump_tree);
2186 update_stmt (gsi_stmt (gsi));
2187 c->cand_stmt = gsi_stmt (gsi);
2188 while (cc->next_interp)
2190 cc = lookup_cand (cc->next_interp);
2191 cc->cand_stmt = gsi_stmt (gsi);
2193 if (dump_file && (dump_flags & TDF_DETAILS))
2194 stmt_to_print = gsi_stmt (gsi);
2198 if (dump_file && (dump_flags & TDF_DETAILS))
2200 fputs ("With: ", dump_file);
2201 print_gimple_stmt (dump_file, stmt_to_print, 0);
2202 fputs ("\n", dump_file);
2206 /* Replace candidate C with an add or subtract. Note that we only
2207 operate on CAND_MULTs with known strides, so we will never generate
2208 a POINTER_PLUS_EXPR. Each candidate X = (B + i) * S is replaced by
2209 X = Y + ((i - i') * S), as described in the module commentary. The
2210 folded value ((i - i') * S) is referred to here as the "bump." */
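/* For example (hypothetical GIMPLE), with a known stride of 4:

     S0:  y_6 = (b_2 + 2) * 4     <-- basis, index i' = 2
     S1:  x_8 = (b_2 + 7) * 4     <-- candidate, index i = 7

   the bump is (7 - 2) * 4 = 20, and S1 is rewritten as

     S1': x_8 = y_6 + 20;                                               */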
2212 static void
2213 replace_unconditional_candidate (slsr_cand_t c)
2215 slsr_cand_t basis;
2217 if (cand_already_replaced (c))
2218 return;
2220 basis = lookup_cand (c->basis);
2221 widest_int bump = cand_increment (c) * wi::to_widest (c->stride);
2223 replace_mult_candidate (c, gimple_assign_lhs (basis->cand_stmt), bump);
2226 /* Return the index in the increment vector of the given INCREMENT,
2227 or -1 if not found. The latter can occur if more than
2228 MAX_INCR_VEC_LEN increments have been found. */
2230 static inline int
2231 incr_vec_index (const widest_int &increment)
2233 unsigned i;
2235 for (i = 0; i < incr_vec_len && increment != incr_vec[i].incr; i++)
2238 if (i < incr_vec_len)
2239 return i;
2240 else
2241 return -1;
2244 /* Create a new statement along edge E to add BASIS_NAME to the product
2245 of INCREMENT and the stride of candidate C. Create and return a new
2246 SSA name to be used as the LHS of the new statement.
2247 KNOWN_STRIDE is true iff C's stride is a constant. */
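/* Sketch (hypothetical names): if BASIS_NAME is y_6, INCREMENT is 3, and
   the known stride is 4, a statement

     slsr_11 = y_6 + 12;

   is queued for insertion on edge E and slsr_11 is returned.  When the
   stride is not a constant, either a previously recorded initializer
   (stride * increment) is added, or, for increments of 1 and -1, the
   stride itself is added or subtracted.  */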
2249 static tree
2250 create_add_on_incoming_edge (slsr_cand_t c, tree basis_name,
2251 widest_int increment, edge e, location_t loc,
2252 bool known_stride)
2254 tree lhs, basis_type;
2255 gassign *new_stmt, *cast_stmt = NULL;
2257 /* If the add candidate along this incoming edge has the same
2258 index as C's hidden basis, the hidden basis represents this
2259 edge correctly. */
2260 if (increment == 0)
2261 return basis_name;
2263 basis_type = TREE_TYPE (basis_name);
2264 lhs = make_temp_ssa_name (basis_type, NULL, "slsr");
2266 /* Occasionally people convert integers to pointers without a
2267 cast, leading us into trouble if we aren't careful. */
2268 enum tree_code plus_code
2269 = POINTER_TYPE_P (basis_type) ? POINTER_PLUS_EXPR : PLUS_EXPR;
2271 if (known_stride)
2273 tree bump_tree;
2274 enum tree_code code = plus_code;
2275 widest_int bump = increment * wi::to_widest (c->stride);
2276 if (wi::neg_p (bump) && !POINTER_TYPE_P (basis_type))
2278 code = MINUS_EXPR;
2279 bump = -bump;
2282 tree stride_type = POINTER_TYPE_P (basis_type) ? sizetype : basis_type;
2283 bump_tree = wide_int_to_tree (stride_type, bump);
2284 new_stmt = gimple_build_assign (lhs, code, basis_name, bump_tree);
2286 else
2288 int i;
2289 bool negate_incr = !POINTER_TYPE_P (basis_type) && wi::neg_p (increment);
2290 i = incr_vec_index (negate_incr ? -increment : increment);
2291 gcc_assert (i >= 0);
2293 if (incr_vec[i].initializer)
2295 enum tree_code code = negate_incr ? MINUS_EXPR : plus_code;
2296 new_stmt = gimple_build_assign (lhs, code, basis_name,
2297 incr_vec[i].initializer);
2299 else {
2300 tree stride;
2302 if (!types_compatible_p (TREE_TYPE (c->stride), c->stride_type))
2304 tree cast_stride = make_temp_ssa_name (c->stride_type, NULL,
2305 "slsr");
2306 cast_stmt = gimple_build_assign (cast_stride, NOP_EXPR,
2307 c->stride);
2308 stride = cast_stride;
2310 else
2311 stride = c->stride;
2313 if (increment == 1)
2314 new_stmt = gimple_build_assign (lhs, plus_code, basis_name, stride);
2315 else if (increment == -1)
2316 new_stmt = gimple_build_assign (lhs, MINUS_EXPR, basis_name, stride);
2317 else
2318 gcc_unreachable ();
2322 if (cast_stmt)
2324 gimple_set_location (cast_stmt, loc);
2325 gsi_insert_on_edge (e, cast_stmt);
2328 gimple_set_location (new_stmt, loc);
2329 gsi_insert_on_edge (e, new_stmt);
2331 if (dump_file && (dump_flags & TDF_DETAILS))
2333 if (cast_stmt)
2335 fprintf (dump_file, "Inserting cast on edge %d->%d: ",
2336 e->src->index, e->dest->index);
2337 print_gimple_stmt (dump_file, cast_stmt, 0);
2339 fprintf (dump_file, "Inserting on edge %d->%d: ", e->src->index,
2340 e->dest->index);
2341 print_gimple_stmt (dump_file, new_stmt, 0);
2344 return lhs;
2347 /* Clear the visited field for a tree of PHI candidates. */
2349 static void
2350 clear_visited (gphi *phi)
2352 unsigned i;
2353 slsr_cand_t phi_cand = *stmt_cand_map->get (phi);
2355 if (phi_cand->visited)
2357 phi_cand->visited = 0;
2359 for (i = 0; i < gimple_phi_num_args (phi); i++)
2361 tree arg = gimple_phi_arg_def (phi, i);
2362 gimple *arg_def = SSA_NAME_DEF_STMT (arg);
2363 if (gimple_code (arg_def) == GIMPLE_PHI)
2364 clear_visited (as_a <gphi *> (arg_def));
2369 /* Recursive helper function for create_phi_basis. */
2371 static tree
2372 create_phi_basis_1 (slsr_cand_t c, gimple *from_phi, tree basis_name,
2373 location_t loc, bool known_stride)
2375 int i;
2376 tree name, phi_arg;
2377 gphi *phi;
2378 slsr_cand_t basis = lookup_cand (c->basis);
2379 int nargs = gimple_phi_num_args (from_phi);
2380 basic_block phi_bb = gimple_bb (from_phi);
2381 slsr_cand_t phi_cand = *stmt_cand_map->get (from_phi);
2382 auto_vec<tree> phi_args (nargs);
2384 if (phi_cand->visited)
2385 return phi_cand->cached_basis;
2386 phi_cand->visited = 1;
2388 /* Process each argument of the existing phi that represents
2389 conditionally-executed add candidates. */
2390 for (i = 0; i < nargs; i++)
2392 edge e = (*phi_bb->preds)[i];
2393 tree arg = gimple_phi_arg_def (from_phi, i);
2394 tree feeding_def;
2396 /* If the phi argument is the base name of the CAND_PHI, then
2397 this incoming arc should use the hidden basis. */
2398 if (operand_equal_p (arg, phi_cand->base_expr, 0))
2399 if (basis->index == 0)
2400 feeding_def = gimple_assign_lhs (basis->cand_stmt);
2401 else
2403 widest_int incr = -basis->index;
2404 feeding_def = create_add_on_incoming_edge (c, basis_name, incr,
2405 e, loc, known_stride);
2407 else
2409 gimple *arg_def = SSA_NAME_DEF_STMT (arg);
2411 /* If there is another phi along this incoming edge, we must
2412 process it in the same fashion to ensure that all basis
2413 adjustments are made along its incoming edges. */
2414 if (gimple_code (arg_def) == GIMPLE_PHI)
2415 feeding_def = create_phi_basis_1 (c, arg_def, basis_name,
2416 loc, known_stride);
2417 else
2419 slsr_cand_t arg_cand = base_cand_from_table (arg);
2420 widest_int diff = arg_cand->index - basis->index;
2421 feeding_def = create_add_on_incoming_edge (c, basis_name, diff,
2422 e, loc, known_stride);
2426 /* Because of recursion, we need to save the arguments in a vector
2427 so we can create the PHI statement all at once. Otherwise the
2428 storage for the half-created PHI can be reclaimed. */
2429 phi_args.safe_push (feeding_def);
2432 /* Create the new phi basis. */
2433 name = make_temp_ssa_name (TREE_TYPE (basis_name), NULL, "slsr");
2434 phi = create_phi_node (name, phi_bb);
2435 SSA_NAME_DEF_STMT (name) = phi;
2437 FOR_EACH_VEC_ELT (phi_args, i, phi_arg)
2439 edge e = (*phi_bb->preds)[i];
2440 add_phi_arg (phi, phi_arg, e, loc);
2443 update_stmt (phi);
2445 if (dump_file && (dump_flags & TDF_DETAILS))
2447 fputs ("Introducing new phi basis: ", dump_file);
2448 print_gimple_stmt (dump_file, phi, 0);
2451 phi_cand->cached_basis = name;
2452 return name;
2455 /* Given a candidate C with BASIS_NAME being the LHS of C's basis which
2456 is hidden by the phi node FROM_PHI, create a new phi node in the same
2457 block as FROM_PHI. The new phi is suitable for use as a basis by C,
2458 with its phi arguments representing conditional adjustments to the
2459 hidden basis along conditional incoming paths. Those adjustments are
2460 made by creating add statements (and sometimes recursively creating
2461 phis) along those incoming paths. LOC is the location to attach to
2462 the introduced statements. KNOWN_STRIDE is true iff C's stride is a
2463 constant. */
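/* A rough sketch (hypothetical GIMPLE): suppose C is x_8 = phi_9 * 4
   with hidden basis y_6 = b_2 * 4, where

     phi_9 = PHI <b_2(2), b_5(3)>     and     b_5 = b_2 + 1;

   Then an adjustment slsr_11 = y_6 + 4 is placed on the second incoming
   edge, a new phi

     slsr_10 = PHI <y_6(2), slsr_11(3)>

   is created in phi_9's block, and slsr_10 serves as the basis actually
   used when C is replaced.  */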
2465 static tree
2466 create_phi_basis (slsr_cand_t c, gimple *from_phi, tree basis_name,
2467 location_t loc, bool known_stride)
2469 tree retval = create_phi_basis_1 (c, from_phi, basis_name, loc,
2470 known_stride);
2471 gcc_assert (retval);
2472 clear_visited (as_a <gphi *> (from_phi));
2473 return retval;
2476 /* Given a candidate C whose basis is hidden by at least one intervening
2477 phi, introduce a matching number of new phis to represent its basis
2478 adjusted by conditional increments along possible incoming paths. Then
2479 replace C as though it were an unconditional candidate, using the new
2480 basis. */
2482 static void
2483 replace_conditional_candidate (slsr_cand_t c)
2485 tree basis_name, name;
2486 slsr_cand_t basis;
2487 location_t loc;
2489 /* Look up the LHS SSA name from C's basis. This will be the
2490 RHS1 of the adds we will introduce to create new phi arguments. */
2491 basis = lookup_cand (c->basis);
2492 basis_name = gimple_assign_lhs (basis->cand_stmt);
2494 /* Create a new phi statement which will represent C's true basis
2495 after the transformation is complete. */
2496 loc = gimple_location (c->cand_stmt);
2497 name = create_phi_basis (c, lookup_cand (c->def_phi)->cand_stmt,
2498 basis_name, loc, KNOWN_STRIDE);
2500 /* Replace C with an add of the new basis phi and a constant. */
2501 widest_int bump = c->index * wi::to_widest (c->stride);
2503 replace_mult_candidate (c, name, bump);
2506 /* Recursive helper function for phi_add_costs. SPREAD is a measure of
2507 how many PHI nodes we have visited at this point in the tree walk. */
2509 static int
2510 phi_add_costs_1 (gimple *phi, slsr_cand_t c, int one_add_cost, int *spread)
2512 unsigned i;
2513 int cost = 0;
2514 slsr_cand_t phi_cand = *stmt_cand_map->get (phi);
2516 if (phi_cand->visited)
2517 return 0;
2519 phi_cand->visited = 1;
2520 (*spread)++;
2522 /* If we work our way back to a phi that isn't dominated by the hidden
2523 basis, this isn't a candidate for replacement. Indicate this by
2524 returning an unreasonably high cost. It's not easy to detect
2525 these situations when determining the basis, so we defer the
2526 decision until now. */
2527 basic_block phi_bb = gimple_bb (phi);
2528 slsr_cand_t basis = lookup_cand (c->basis);
2529 basic_block basis_bb = gimple_bb (basis->cand_stmt);
2531 if (phi_bb == basis_bb || !dominated_by_p (CDI_DOMINATORS, phi_bb, basis_bb))
2532 return COST_INFINITE;
2534 for (i = 0; i < gimple_phi_num_args (phi); i++)
2536 tree arg = gimple_phi_arg_def (phi, i);
2538 if (arg != phi_cand->base_expr)
2540 gimple *arg_def = SSA_NAME_DEF_STMT (arg);
2542 if (gimple_code (arg_def) == GIMPLE_PHI)
2544 cost += phi_add_costs_1 (arg_def, c, one_add_cost, spread);
2546 if (cost >= COST_INFINITE || *spread > MAX_SPREAD)
2547 return COST_INFINITE;
2549 else
2551 slsr_cand_t arg_cand = base_cand_from_table (arg);
2553 if (arg_cand->index != c->index)
2554 cost += one_add_cost;
2559 return cost;
2562 /* Compute the expected costs of inserting basis adjustments for
2563 candidate C with phi-definition PHI. The cost of inserting
2564 one adjustment is given by ONE_ADD_COST. If PHI has arguments
2565 which are themselves phi results, recursively calculate costs
2566 for those phis as well. */
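/* For instance, if PHI has three arguments, one of which is the phi's
   base name and two of which are defined by candidates whose index
   differs from C's (neither being itself a phi), the estimate is
   2 * ONE_ADD_COST.  */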
2568 static int
2569 phi_add_costs (gimple *phi, slsr_cand_t c, int one_add_cost)
2571 int spread = 0;
2572 int retval = phi_add_costs_1 (phi, c, one_add_cost, &spread);
2573 clear_visited (as_a <gphi *> (phi));
2574 return retval;
2576 /* For candidate C, each sibling of candidate C, and each dependent of
2577 candidate C, determine whether the candidate is dependent upon a
2578 phi that hides its basis. If not, replace the candidate unconditionally.
2579 Otherwise, determine whether the cost of introducing compensation code
2580 for the candidate is offset by the gains from strength reduction. If
2581 so, replace the candidate and introduce the compensation code. */
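/* Hypothetical numbers: with a per-add cost of 4, a phi needing two edge
   adjustments gives add_costs = 4 + 2*4 = 12; if the multiply being
   removed costs 16 and nothing else goes dead, cost = 12 - 16 - 0 = -4,
   which is <= COST_NEUTRAL, so the candidate is replaced.  */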
2583 static void
2584 replace_uncond_cands_and_profitable_phis (slsr_cand_t c)
2586 if (phi_dependent_cand_p (c))
2588 /* A multiply candidate with a stride of 1 is just an artifice
2589 of a copy or cast; there is no value in replacing it. */
2590 if (c->kind == CAND_MULT && wi::to_widest (c->stride) != 1)
2592 /* A candidate dependent upon a phi will replace a multiply by
2593 a constant with an add, and will insert at most one add for
2594 each phi argument. Add these costs with the potential dead-code
2595 savings to determine profitability. */
2596 bool speed = optimize_bb_for_speed_p (gimple_bb (c->cand_stmt));
2597 int mult_savings = stmt_cost (c->cand_stmt, speed);
2598 gimple *phi = lookup_cand (c->def_phi)->cand_stmt;
2599 tree phi_result = gimple_phi_result (phi);
2600 int one_add_cost = add_cost (speed,
2601 TYPE_MODE (TREE_TYPE (phi_result)));
2602 int add_costs = one_add_cost + phi_add_costs (phi, c, one_add_cost);
2603 int cost = add_costs - mult_savings - c->dead_savings;
2605 if (dump_file && (dump_flags & TDF_DETAILS))
2607 fprintf (dump_file, " Conditional candidate %d:\n", c->cand_num);
2608 fprintf (dump_file, " add_costs = %d\n", add_costs);
2609 fprintf (dump_file, " mult_savings = %d\n", mult_savings);
2610 fprintf (dump_file, " dead_savings = %d\n", c->dead_savings);
2611 fprintf (dump_file, " cost = %d\n", cost);
2612 if (cost <= COST_NEUTRAL)
2613 fputs (" Replacing...\n", dump_file);
2614 else
2615 fputs (" Not replaced.\n", dump_file);
2618 if (cost <= COST_NEUTRAL)
2619 replace_conditional_candidate (c);
2622 else
2623 replace_unconditional_candidate (c);
2625 if (c->sibling)
2626 replace_uncond_cands_and_profitable_phis (lookup_cand (c->sibling));
2628 if (c->dependent)
2629 replace_uncond_cands_and_profitable_phis (lookup_cand (c->dependent));
2632 /* Count the number of candidates in the tree rooted at C that have
2633 not already been replaced under other interpretations. */
2635 static int
2636 count_candidates (slsr_cand_t c)
2638 unsigned count = cand_already_replaced (c) ? 0 : 1;
2640 if (c->sibling)
2641 count += count_candidates (lookup_cand (c->sibling));
2643 if (c->dependent)
2644 count += count_candidates (lookup_cand (c->dependent));
2646 return count;
2649 /* Increase the count of INCREMENT by one in the increment vector.
2650 INCREMENT is associated with candidate C. If INCREMENT is to be
2651 conditionally executed as part of a conditional candidate replacement,
2652 IS_PHI_ADJUST is true, otherwise false. If an initializer
2653 T_0 = stride * I is provided by a candidate that dominates all
2654 candidates with the same increment, also record T_0 for subsequent use. */
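/* For instance (hypothetical GIMPLE), given

     t_6 = s_5 * 5;
     x_7 = base_3 + t_6;     ADD : base_3 + (5 * s_5)

   recording increment 5 on behalf of x_7 also records t_6 as that
   increment's initializer, provided x_7's block dominates all other
   candidates that use increment 5.  */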
2656 static void
2657 record_increment (slsr_cand_t c, widest_int increment, bool is_phi_adjust)
2659 bool found = false;
2660 unsigned i;
2662 /* Treat increments that differ only in sign as identical so as to
2663 share initializers, unless we are generating pointer arithmetic. */
2664 if (!address_arithmetic_p && wi::neg_p (increment))
2665 increment = -increment;
2667 for (i = 0; i < incr_vec_len; i++)
2669 if (incr_vec[i].incr == increment)
2671 incr_vec[i].count++;
2672 found = true;
2674 /* If we previously recorded an initializer that doesn't
2675 dominate this candidate, it's not going to be useful to
2676 us after all. */
2677 if (incr_vec[i].initializer
2678 && !dominated_by_p (CDI_DOMINATORS,
2679 gimple_bb (c->cand_stmt),
2680 incr_vec[i].init_bb))
2682 incr_vec[i].initializer = NULL_TREE;
2683 incr_vec[i].init_bb = NULL;
2686 break;
2690 if (!found && incr_vec_len < MAX_INCR_VEC_LEN - 1)
2692 /* The first time we see an increment, create the entry for it.
2693 If this is the root candidate which doesn't have a basis, set
2694 the count to zero. We're only processing it so it can possibly
2695 provide an initializer for other candidates. */
2696 incr_vec[incr_vec_len].incr = increment;
2697 incr_vec[incr_vec_len].count = c->basis || is_phi_adjust ? 1 : 0;
2698 incr_vec[incr_vec_len].cost = COST_INFINITE;
2700 /* Optimistically record the first occurrence of this increment
2701 as providing an initializer (if it does); we will revise this
2702 opinion later if it doesn't dominate all other occurrences.
2703 Exception: increments of 0, 1 never need initializers;
2704 and phi adjustments don't ever provide initializers. */
2705 if (c->kind == CAND_ADD
2706 && !is_phi_adjust
2707 && c->index == increment
2708 && (increment > 1 || increment < 0)
2709 && (gimple_assign_rhs_code (c->cand_stmt) == PLUS_EXPR
2710 || gimple_assign_rhs_code (c->cand_stmt) == POINTER_PLUS_EXPR))
2712 tree t0 = NULL_TREE;
2713 tree rhs1 = gimple_assign_rhs1 (c->cand_stmt);
2714 tree rhs2 = gimple_assign_rhs2 (c->cand_stmt);
2715 if (operand_equal_p (rhs1, c->base_expr, 0))
2716 t0 = rhs2;
2717 else if (operand_equal_p (rhs2, c->base_expr, 0))
2718 t0 = rhs1;
2719 if (t0
2720 && SSA_NAME_DEF_STMT (t0)
2721 && gimple_bb (SSA_NAME_DEF_STMT (t0)))
2723 incr_vec[incr_vec_len].initializer = t0;
2724 incr_vec[incr_vec_len++].init_bb
2725 = gimple_bb (SSA_NAME_DEF_STMT (t0));
2727 else
2729 incr_vec[incr_vec_len].initializer = NULL_TREE;
2730 incr_vec[incr_vec_len++].init_bb = NULL;
2733 else
2735 incr_vec[incr_vec_len].initializer = NULL_TREE;
2736 incr_vec[incr_vec_len++].init_bb = NULL;
2741 /* Recursive helper function for record_phi_increments. */
2743 static void
2744 record_phi_increments_1 (slsr_cand_t basis, gimple *phi)
2746 unsigned i;
2747 slsr_cand_t phi_cand = *stmt_cand_map->get (phi);
2749 if (phi_cand->visited)
2750 return;
2751 phi_cand->visited = 1;
2753 for (i = 0; i < gimple_phi_num_args (phi); i++)
2755 tree arg = gimple_phi_arg_def (phi, i);
2757 if (!operand_equal_p (arg, phi_cand->base_expr, 0))
2759 gimple *arg_def = SSA_NAME_DEF_STMT (arg);
2761 if (gimple_code (arg_def) == GIMPLE_PHI)
2762 record_phi_increments_1 (basis, arg_def);
2763 else
2765 slsr_cand_t arg_cand = base_cand_from_table (arg);
2766 widest_int diff = arg_cand->index - basis->index;
2767 record_increment (arg_cand, diff, PHI_ADJUST);
2773 /* Given phi statement PHI that hides a candidate from its BASIS, find
2774 the increments along each incoming arc (recursively handling additional
2775 phis that may be present) and record them. These increments are the
2776 difference in index between the index-adjusting statements and the
2777 index of the basis. */
2779 static void
2780 record_phi_increments (slsr_cand_t basis, gimple *phi)
2782 record_phi_increments_1 (basis, phi);
2783 clear_visited (as_a <gphi *> (phi));
2786 /* Determine how many times each unique increment occurs in the set
2787 of candidates rooted at C's parent, recording the data in the
2788 increment vector. For each unique increment I, if an initializer
2789 T_0 = stride * I is provided by a candidate that dominates all
2790 candidates with the same increment, also record T_0 for subsequent
2791 use. */
2793 static void
2794 record_increments (slsr_cand_t c)
2796 if (!cand_already_replaced (c))
2798 if (!phi_dependent_cand_p (c))
2799 record_increment (c, cand_increment (c), NOT_PHI_ADJUST);
2800 else
2802 /* A candidate with a basis hidden by a phi will have one
2803 increment for its relationship to the index represented by
2804 the phi, and potentially additional increments along each
2805 incoming edge. For the root of the dependency tree (which
2806 has no basis), process just the initial index in case it has
2807 an initializer that can be used by subsequent candidates. */
2808 record_increment (c, c->index, NOT_PHI_ADJUST);
2810 if (c->basis)
2811 record_phi_increments (lookup_cand (c->basis),
2812 lookup_cand (c->def_phi)->cand_stmt);
2816 if (c->sibling)
2817 record_increments (lookup_cand (c->sibling));
2819 if (c->dependent)
2820 record_increments (lookup_cand (c->dependent));
2823 /* Recursive helper function for phi_incr_cost. */
2825 static int
2826 phi_incr_cost_1 (slsr_cand_t c, const widest_int &incr, gimple *phi,
2827 int *savings)
2829 unsigned i;
2830 int cost = 0;
2831 slsr_cand_t basis = lookup_cand (c->basis);
2832 slsr_cand_t phi_cand = *stmt_cand_map->get (phi);
2834 if (phi_cand->visited)
2835 return 0;
2836 phi_cand->visited = 1;
2838 for (i = 0; i < gimple_phi_num_args (phi); i++)
2840 tree arg = gimple_phi_arg_def (phi, i);
2842 if (!operand_equal_p (arg, phi_cand->base_expr, 0))
2844 gimple *arg_def = SSA_NAME_DEF_STMT (arg);
2846 if (gimple_code (arg_def) == GIMPLE_PHI)
2848 int feeding_savings = 0;
2849 tree feeding_var = gimple_phi_result (arg_def);
2850 cost += phi_incr_cost_1 (c, incr, arg_def, &feeding_savings);
2851 if (uses_consumed_by_stmt (feeding_var, phi))
2852 *savings += feeding_savings;
2854 else
2856 slsr_cand_t arg_cand = base_cand_from_table (arg);
2857 widest_int diff = arg_cand->index - basis->index;
2859 if (incr == diff)
2861 tree basis_lhs = gimple_assign_lhs (basis->cand_stmt);
2862 tree lhs = gimple_assign_lhs (arg_cand->cand_stmt);
2863 cost += add_cost (true, TYPE_MODE (TREE_TYPE (basis_lhs)));
2864 if (uses_consumed_by_stmt (lhs, phi))
2865 *savings += stmt_cost (arg_cand->cand_stmt, true);
2871 return cost;
2874 /* Add up and return the costs of introducing add statements that
2875 require the increment INCR on behalf of candidate C and phi
2876 statement PHI. Accumulate into *SAVINGS the potential savings
2877 from removing existing statements that feed PHI and have no other
2878 uses. */
2880 static int
2881 phi_incr_cost (slsr_cand_t c, const widest_int &incr, gimple *phi,
2882 int *savings)
2884 int retval = phi_incr_cost_1 (c, incr, phi, savings);
2885 clear_visited (as_a <gphi *> (phi));
2886 return retval;
2889 /* Return the first candidate in the tree rooted at C that has not
2890 already been replaced, favoring siblings over dependents. */
2892 static slsr_cand_t
2893 unreplaced_cand_in_tree (slsr_cand_t c)
2895 if (!cand_already_replaced (c))
2896 return c;
2898 if (c->sibling)
2900 slsr_cand_t sib = unreplaced_cand_in_tree (lookup_cand (c->sibling));
2901 if (sib)
2902 return sib;
2905 if (c->dependent)
2907 slsr_cand_t dep = unreplaced_cand_in_tree (lookup_cand (c->dependent));
2908 if (dep)
2909 return dep;
2912 return NULL;
2915 /* Return TRUE if the candidates in the tree rooted at C should be
2916 optimized for speed, else FALSE. We estimate this based on the block
2917 containing the most dominant candidate in the tree that has not yet
2918 been replaced. */
2920 static bool
2921 optimize_cands_for_speed_p (slsr_cand_t c)
2923 slsr_cand_t c2 = unreplaced_cand_in_tree (c);
2924 gcc_assert (c2);
2925 return optimize_bb_for_speed_p (gimple_bb (c2->cand_stmt));
2928 /* Add COST_IN to the lowest cost of any dependent path starting at
2929 candidate C or any of its siblings, counting only candidates along
2930 such paths with increment INCR. Assume that replacing a candidate
2931 reduces cost by REPL_SAVINGS. Also account for savings from any
2932 statements that would go dead. If COUNT_PHIS is true, include
2933 costs of introducing feeding statements for conditional candidates. */
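/* Small worked example (hypothetical): with COST_IN = 2 and
   REPL_SAVINGS = 3, a dependent chain of two candidates whose increment
   matches INCR and that have no dead savings yields 2 - 3 - 3 = -4; if
   a sibling path instead yields -1, the minimum, -4, is returned.  */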
2935 static int
2936 lowest_cost_path (int cost_in, int repl_savings, slsr_cand_t c,
2937 const widest_int &incr, bool count_phis)
2939 int local_cost, sib_cost, savings = 0;
2940 widest_int cand_incr = cand_abs_increment (c);
2942 if (cand_already_replaced (c))
2943 local_cost = cost_in;
2944 else if (incr == cand_incr)
2945 local_cost = cost_in - repl_savings - c->dead_savings;
2946 else
2947 local_cost = cost_in - c->dead_savings;
2949 if (count_phis
2950 && phi_dependent_cand_p (c)
2951 && !cand_already_replaced (c))
2953 gimple *phi = lookup_cand (c->def_phi)->cand_stmt;
2954 local_cost += phi_incr_cost (c, incr, phi, &savings);
2956 if (uses_consumed_by_stmt (gimple_phi_result (phi), c->cand_stmt))
2957 local_cost -= savings;
2960 if (c->dependent)
2961 local_cost = lowest_cost_path (local_cost, repl_savings,
2962 lookup_cand (c->dependent), incr,
2963 count_phis);
2965 if (c->sibling)
2967 sib_cost = lowest_cost_path (cost_in, repl_savings,
2968 lookup_cand (c->sibling), incr,
2969 count_phis);
2970 local_cost = MIN (local_cost, sib_cost);
2973 return local_cost;
2976 /* Compute the total savings that would accrue from all replacements
2977 in the candidate tree rooted at C, counting only candidates with
2978 increment INCR. Assume that replacing a candidate reduces cost
2979 by REPL_SAVINGS. Also account for savings from statements that
2980 would go dead. */
2982 static int
2983 total_savings (int repl_savings, slsr_cand_t c, const widest_int &incr,
2984 bool count_phis)
2986 int savings = 0;
2987 widest_int cand_incr = cand_abs_increment (c);
2989 if (incr == cand_incr && !cand_already_replaced (c))
2990 savings += repl_savings + c->dead_savings;
2992 if (count_phis
2993 && phi_dependent_cand_p (c)
2994 && !cand_already_replaced (c))
2996 int phi_savings = 0;
2997 gimple *phi = lookup_cand (c->def_phi)->cand_stmt;
2998 savings -= phi_incr_cost (c, incr, phi, &phi_savings);
3000 if (uses_consumed_by_stmt (gimple_phi_result (phi), c->cand_stmt))
3001 savings += phi_savings;
3004 if (c->dependent)
3005 savings += total_savings (repl_savings, lookup_cand (c->dependent), incr,
3006 count_phis);
3008 if (c->sibling)
3009 savings += total_savings (repl_savings, lookup_cand (c->sibling), incr,
3010 count_phis);
3012 return savings;
3015 /* Use target-specific costs to determine and record which increments
3016 in the current candidate tree are profitable to replace, assuming
3017 MODE and SPEED. FIRST_DEP is the first dependent of the root of
3018 the candidate tree.
3020 One slight limitation here is that we don't account for the possible
3021 introduction of casts in some cases. See replace_one_candidate for
3022 the cases where these are introduced. This should probably be cleaned
3023 up sometime. */
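/* A hypothetical sizing of the CAND_MULT case: if multiplying by the
   increment costs 2, the original multiply costs 5, and an add costs 1,
   then repl_savings = 5 - 1 = 4, and a speed-optimized path containing
   two replaceable candidates costs 2 - 4 - 4 = -6, so the increment is
   recorded as profitable.  */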
3025 static void
3026 analyze_increments (slsr_cand_t first_dep, machine_mode mode, bool speed)
3028 unsigned i;
3030 for (i = 0; i < incr_vec_len; i++)
3032 HOST_WIDE_INT incr = incr_vec[i].incr.to_shwi ();
3034 /* If somehow this increment is bigger than a HWI, we won't
3035 be optimizing candidates that use it. And if the increment
3036 has a count of zero, nothing will be done with it. */
3037 if (!wi::fits_shwi_p (incr_vec[i].incr) || !incr_vec[i].count)
3038 incr_vec[i].cost = COST_INFINITE;
3040 /* Increments of 0, 1, and -1 are always profitable to replace,
3041 because they always replace a multiply or add with an add or
3042 copy, and may cause one or more existing instructions to go
3043 dead. Exception: -1 can't be assumed to be profitable for
3044 pointer addition. */
3045 else if (incr == 0
3046 || incr == 1
3047 || (incr == -1
3048 && !POINTER_TYPE_P (first_dep->cand_type)))
3049 incr_vec[i].cost = COST_NEUTRAL;
3051 /* If we need to add an initializer, give up if a cast from the
3052 candidate's type to its stride's type can lose precision.
3053 Note that this already takes into account that the stride may
3054 have been cast to a wider type, in which case this test won't
3055 fire. Example:
3057 short int _1;
3058 _2 = (int) _1;
3059 _3 = _2 * 10;
3060 _4 = x + _3; ADD: x + (10 * (int)_1) : int
3061 _5 = _2 * 15;
3062 _6 = x + _5; ADD: x + (15 * (int)_1) : int
3064 Although the stride was a short int initially, the stride
3065 used in the analysis has been widened to an int, and such
3066 widening will be done in the initializer as well. */
3067 else if (!incr_vec[i].initializer
3068 && TREE_CODE (first_dep->stride) != INTEGER_CST
3069 && !legal_cast_p_1 (first_dep->stride_type,
3070 TREE_TYPE (gimple_assign_lhs
3071 (first_dep->cand_stmt))))
3072 incr_vec[i].cost = COST_INFINITE;
3074 /* If we need to add an initializer, make sure we don't introduce
3075 a multiply by a pointer type, which can happen in certain cast
3076 scenarios. */
3077 else if (!incr_vec[i].initializer
3078 && TREE_CODE (first_dep->stride) != INTEGER_CST
3079 && POINTER_TYPE_P (first_dep->stride_type))
3080 incr_vec[i].cost = COST_INFINITE;
3082 /* For any other increment, if this is a multiply candidate, we
3083 must introduce a temporary T and initialize it with
3084 T_0 = stride * increment. When optimizing for speed, walk the
3085 candidate tree to calculate the best cost reduction along any
3086 path; if it offsets the fixed cost of inserting the initializer,
3087 replacing the increment is profitable. When optimizing for
3088 size, instead calculate the total cost reduction from replacing
3089 all candidates with this increment. */
3090 else if (first_dep->kind == CAND_MULT)
3092 int cost = mult_by_coeff_cost (incr, mode, speed);
3093 int repl_savings;
3095 if (tree_fits_shwi_p (first_dep->stride))
3097 HOST_WIDE_INT hwi_stride = tree_to_shwi (first_dep->stride);
3098 repl_savings = mult_by_coeff_cost (hwi_stride, mode, speed);
3100 else
3101 repl_savings = mul_cost (speed, mode);
3102 repl_savings -= add_cost (speed, mode);
3104 if (speed)
3105 cost = lowest_cost_path (cost, repl_savings, first_dep,
3106 incr_vec[i].incr, COUNT_PHIS);
3107 else
3108 cost -= total_savings (repl_savings, first_dep, incr_vec[i].incr,
3109 COUNT_PHIS);
3111 incr_vec[i].cost = cost;
3114 /* If this is an add candidate, the initializer may already
3115 exist, so only calculate the cost of the initializer if it
3116 doesn't. We are replacing one add with another here, so the
3117 known replacement savings is zero. We will account for removal
3118 of dead instructions in lowest_cost_path or total_savings. */
3119 else
3121 int cost = 0;
3122 if (!incr_vec[i].initializer)
3123 cost = mult_by_coeff_cost (incr, mode, speed);
3125 if (speed)
3126 cost = lowest_cost_path (cost, 0, first_dep, incr_vec[i].incr,
3127 DONT_COUNT_PHIS);
3128 else
3129 cost -= total_savings (0, first_dep, incr_vec[i].incr,
3130 DONT_COUNT_PHIS);
3132 incr_vec[i].cost = cost;
3137 /* Return the nearest common dominator of BB1 and BB2. If the blocks
3138 are identical, return the earlier of C1 and C2 in *WHERE. Otherwise,
3139 if the NCD matches BB1, return C1 in *WHERE; if the NCD matches BB2,
3140 return C2 in *WHERE; and if the NCD matches neither, return NULL in
3141 *WHERE. Note: It is possible for one of C1 and C2 to be NULL. */
3143 static basic_block
3144 ncd_for_two_cands (basic_block bb1, basic_block bb2,
3145 slsr_cand_t c1, slsr_cand_t c2, slsr_cand_t *where)
3147 basic_block ncd;
3149 if (!bb1)
3151 *where = c2;
3152 return bb2;
3155 if (!bb2)
3157 *where = c1;
3158 return bb1;
3161 ncd = nearest_common_dominator (CDI_DOMINATORS, bb1, bb2);
3163 /* If both candidates are in the same block, the earlier
3164 candidate wins. */
3165 if (bb1 == ncd && bb2 == ncd)
3167 if (!c1 || (c2 && c2->cand_num < c1->cand_num))
3168 *where = c2;
3169 else
3170 *where = c1;
3173 /* Otherwise, if one of them produced a candidate in the
3174 dominator, that one wins. */
3175 else if (bb1 == ncd)
3176 *where = c1;
3178 else if (bb2 == ncd)
3179 *where = c2;
3181 /* If neither matches the dominator, neither wins. */
3182 else
3183 *where = NULL;
3185 return ncd;
3188 /* Consider all candidates that feed PHI. Find the nearest common
3189 dominator of those candidates requiring the given increment INCR.
3190 Further find and return the nearest common dominator of this result
3191 with block NCD. If the returned block contains one or more of the
3192 candidates, return the earliest candidate in the block in *WHERE. */
3194 static basic_block
3195 ncd_with_phi (slsr_cand_t c, const widest_int &incr, gphi *phi,
3196 basic_block ncd, slsr_cand_t *where)
3198 unsigned i;
3199 slsr_cand_t basis = lookup_cand (c->basis);
3200 slsr_cand_t phi_cand = *stmt_cand_map->get (phi);
3202 for (i = 0; i < gimple_phi_num_args (phi); i++)
3204 tree arg = gimple_phi_arg_def (phi, i);
3206 if (!operand_equal_p (arg, phi_cand->base_expr, 0))
3208 gimple *arg_def = SSA_NAME_DEF_STMT (arg);
3210 if (gimple_code (arg_def) == GIMPLE_PHI)
3211 ncd = ncd_with_phi (c, incr, as_a <gphi *> (arg_def), ncd,
3212 where);
3213 else
3215 slsr_cand_t arg_cand = base_cand_from_table (arg);
3216 widest_int diff = arg_cand->index - basis->index;
3217 basic_block pred = gimple_phi_arg_edge (phi, i)->src;
3219 if ((incr == diff) || (!address_arithmetic_p && incr == -diff))
3220 ncd = ncd_for_two_cands (ncd, pred, *where, NULL, where);
3225 return ncd;
3228 /* Consider the candidate C together with any candidates that feed
3229 C's phi dependence (if any). Find and return the nearest common
3230 dominator of those candidates requiring the given increment INCR.
3231 If the returned block contains one or more of the candidates,
3232 return the earliest candidate in the block in *WHERE. */
3234 static basic_block
3235 ncd_of_cand_and_phis (slsr_cand_t c, const widest_int &incr, slsr_cand_t *where)
3237 basic_block ncd = NULL;
3239 if (cand_abs_increment (c) == incr)
3241 ncd = gimple_bb (c->cand_stmt);
3242 *where = c;
3245 if (phi_dependent_cand_p (c))
3246 ncd = ncd_with_phi (c, incr,
3247 as_a <gphi *> (lookup_cand (c->def_phi)->cand_stmt),
3248 ncd, where);
3250 return ncd;
3253 /* Consider all candidates in the tree rooted at C for which INCR
3254 represents the required increment of C relative to its basis.
3255 Find and return the basic block that most nearly dominates all
3256 such candidates. If the returned block contains one or more of
3257 the candidates, return the earliest candidate in the block in
3258 *WHERE. */
3260 static basic_block
3261 nearest_common_dominator_for_cands (slsr_cand_t c, const widest_int &incr,
3262 slsr_cand_t *where)
3264 basic_block sib_ncd = NULL, dep_ncd = NULL, this_ncd = NULL, ncd;
3265 slsr_cand_t sib_where = NULL, dep_where = NULL, this_where = NULL, new_where;
3267 /* First find the NCD of all siblings and dependents. */
3268 if (c->sibling)
3269 sib_ncd = nearest_common_dominator_for_cands (lookup_cand (c->sibling),
3270 incr, &sib_where);
3271 if (c->dependent)
3272 dep_ncd = nearest_common_dominator_for_cands (lookup_cand (c->dependent),
3273 incr, &dep_where);
3274 if (!sib_ncd && !dep_ncd)
3276 new_where = NULL;
3277 ncd = NULL;
3279 else if (sib_ncd && !dep_ncd)
3281 new_where = sib_where;
3282 ncd = sib_ncd;
3284 else if (dep_ncd && !sib_ncd)
3286 new_where = dep_where;
3287 ncd = dep_ncd;
3289 else
3290 ncd = ncd_for_two_cands (sib_ncd, dep_ncd, sib_where,
3291 dep_where, &new_where);
3293 /* If the candidate's increment doesn't match the one we're interested
3294 in (nor do any increments for feeding defs of a phi-dependence),
3295 then the result depends only on siblings and dependents. */
3296 this_ncd = ncd_of_cand_and_phis (c, incr, &this_where);
3298 if (!this_ncd || cand_already_replaced (c))
3300 *where = new_where;
3301 return ncd;
3304 /* Otherwise, compare this candidate with the result from all siblings
3305 and dependents. */
3306 ncd = ncd_for_two_cands (ncd, this_ncd, new_where, this_where, where);
3308 return ncd;
3311 /* Return TRUE if the increment indexed by INDEX is profitable to replace. */
3313 static inline bool
3314 profitable_increment_p (unsigned index)
3316 return (incr_vec[index].cost <= COST_NEUTRAL);
3319 /* For each profitable increment in the increment vector not equal to
3320 0 or 1 (or -1, for non-pointer arithmetic), find the nearest common
3321 dominator of all statements in the candidate chain rooted at C
3322 that require that increment, and insert an initializer
3323 T_0 = stride * increment at that location. Record T_0 with the
3324 increment record. */
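/* Sketch (hypothetical GIMPLE): for a profitable increment of 5 with
   stride s_5, a statement

     slsr_9 = s_5 * 5;

   is inserted at the nearest common dominator of all candidates needing
   that increment, and slsr_9 is recorded as the increment's initializer
   for later use by replace_one_candidate.  */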
3326 static void
3327 insert_initializers (slsr_cand_t c)
3329 unsigned i;
3331 for (i = 0; i < incr_vec_len; i++)
3333 basic_block bb;
3334 slsr_cand_t where = NULL;
3335 gassign *init_stmt;
3336 gassign *cast_stmt = NULL;
3337 tree new_name, incr_tree, init_stride;
3338 widest_int incr = incr_vec[i].incr;
3340 if (!profitable_increment_p (i)
3341 || incr == 1
3342 || (incr == -1
3343 && (!POINTER_TYPE_P (lookup_cand (c->basis)->cand_type)))
3344 || incr == 0)
3345 continue;
3347 /* We may have already identified an existing initializer that
3348 will suffice. */
3349 if (incr_vec[i].initializer)
3351 if (dump_file && (dump_flags & TDF_DETAILS))
3353 fputs ("Using existing initializer: ", dump_file);
3354 print_gimple_stmt (dump_file,
3355 SSA_NAME_DEF_STMT (incr_vec[i].initializer),
3356 0, 0);
3358 continue;
3361 /* Find the block that most closely dominates all candidates
3362 with this increment. If there is at least one candidate in
3363 that block, the earliest one will be returned in WHERE. */
3364 bb = nearest_common_dominator_for_cands (c, incr, &where);
3366 /* If the NCD is not dominated by the block containing the
3367 definition of the stride, we can't legally insert a
3368 single initializer. Mark the increment as unprofitable
3369 so we don't make any replacements. FIXME: Multiple
3370 initializers could be placed with more analysis. */
3371 gimple *stride_def = SSA_NAME_DEF_STMT (c->stride);
3372 basic_block stride_bb = gimple_bb (stride_def);
3374 if (stride_bb && !dominated_by_p (CDI_DOMINATORS, bb, stride_bb))
3376 if (dump_file && (dump_flags & TDF_DETAILS))
3377 fprintf (dump_file,
3378 "Initializer #%d cannot be legally placed\n", i);
3379 incr_vec[i].cost = COST_INFINITE;
3380 continue;
3383 /* If the nominal stride has a different type than the recorded
3384 stride type, build a cast from the nominal stride to that type. */
3385 if (!types_compatible_p (TREE_TYPE (c->stride), c->stride_type))
3387 init_stride = make_temp_ssa_name (c->stride_type, NULL, "slsr");
3388 cast_stmt = gimple_build_assign (init_stride, NOP_EXPR, c->stride);
3390 else
3391 init_stride = c->stride;
3393 /* Create a new SSA name to hold the initializer's value. */
3394 new_name = make_temp_ssa_name (c->stride_type, NULL, "slsr");
3395 incr_vec[i].initializer = new_name;
3397 /* Create the initializer and insert it in the latest possible
3398 dominating position. */
3399 incr_tree = wide_int_to_tree (c->stride_type, incr);
3400 init_stmt = gimple_build_assign (new_name, MULT_EXPR,
3401 init_stride, incr_tree);
3402 if (where)
3404 gimple_stmt_iterator gsi = gsi_for_stmt (where->cand_stmt);
3405 location_t loc = gimple_location (where->cand_stmt);
3407 if (cast_stmt)
3409 gsi_insert_before (&gsi, cast_stmt, GSI_SAME_STMT);
3410 gimple_set_location (cast_stmt, loc);
3413 gsi_insert_before (&gsi, init_stmt, GSI_SAME_STMT);
3414 gimple_set_location (init_stmt, loc);
3416 else
3418 gimple_stmt_iterator gsi = gsi_last_bb (bb);
3419 gimple *basis_stmt = lookup_cand (c->basis)->cand_stmt;
3420 location_t loc = gimple_location (basis_stmt);
3422 if (!gsi_end_p (gsi) && stmt_ends_bb_p (gsi_stmt (gsi)))
3424 if (cast_stmt)
3426 gsi_insert_before (&gsi, cast_stmt, GSI_SAME_STMT);
3427 gimple_set_location (cast_stmt, loc);
3429 gsi_insert_before (&gsi, init_stmt, GSI_SAME_STMT);
3431 else
3433 if (cast_stmt)
3435 gsi_insert_after (&gsi, cast_stmt, GSI_NEW_STMT);
3436 gimple_set_location (cast_stmt, loc);
3438 gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
3441 gimple_set_location (init_stmt, gimple_location (basis_stmt));
3444 if (dump_file && (dump_flags & TDF_DETAILS))
3446 if (cast_stmt)
3448 fputs ("Inserting stride cast: ", dump_file);
3449 print_gimple_stmt (dump_file, cast_stmt, 0);
3451 fputs ("Inserting initializer: ", dump_file);
3452 print_gimple_stmt (dump_file, init_stmt, 0);
3457 /* Recursive helper function for all_phi_incrs_profitable. */
3459 static bool
3460 all_phi_incrs_profitable_1 (slsr_cand_t c, gphi *phi, int *spread)
3462 unsigned i;
3463 slsr_cand_t basis = lookup_cand (c->basis);
3464 slsr_cand_t phi_cand = *stmt_cand_map->get (phi);
3466 if (phi_cand->visited)
3467 return true;
3469 phi_cand->visited = 1;
3470 (*spread)++;
3472 /* If the basis doesn't dominate the PHI (including when the PHI is
3473 in the same block as the basis), we won't be able to create a PHI
3474 using the basis here. */
3475 basic_block basis_bb = gimple_bb (basis->cand_stmt);
3476 basic_block phi_bb = gimple_bb (phi);
3478 if (phi_bb == basis_bb
3479 || !dominated_by_p (CDI_DOMINATORS, phi_bb, basis_bb))
3480 return false;
3482 for (i = 0; i < gimple_phi_num_args (phi); i++)
3484 /* If the PHI arg resides in a block not dominated by the basis,
3485 we won't be able to create a PHI using the basis here. */
3486 basic_block pred_bb = gimple_phi_arg_edge (phi, i)->src;
3488 if (!dominated_by_p (CDI_DOMINATORS, pred_bb, basis_bb))
3489 return false;
3491 tree arg = gimple_phi_arg_def (phi, i);
3493 if (!operand_equal_p (arg, phi_cand->base_expr, 0))
3495 gimple *arg_def = SSA_NAME_DEF_STMT (arg);
3497 if (gimple_code (arg_def) == GIMPLE_PHI)
3499 if (!all_phi_incrs_profitable_1 (c, as_a <gphi *> (arg_def),
3500 spread)
3501 || *spread > MAX_SPREAD)
3502 return false;
3504 else
3506 int j;
3507 slsr_cand_t arg_cand = base_cand_from_table (arg);
3508 widest_int increment = arg_cand->index - basis->index;
3510 if (!address_arithmetic_p && wi::neg_p (increment))
3511 increment = -increment;
3513 j = incr_vec_index (increment);
3515 if (dump_file && (dump_flags & TDF_DETAILS))
3517 fprintf (dump_file, " Conditional candidate %d, phi: ",
3518 c->cand_num);
3519 print_gimple_stmt (dump_file, phi, 0);
3520 fputs (" increment: ", dump_file);
3521 print_decs (increment, dump_file);
3522 if (j < 0)
3523 fprintf (dump_file,
3524 "\n Not replaced; incr_vec overflow.\n");
3525 else {
3526 fprintf (dump_file, "\n cost: %d\n", incr_vec[j].cost);
3527 if (profitable_increment_p (j))
3528 fputs (" Replacing...\n", dump_file);
3529 else
3530 fputs (" Not replaced.\n", dump_file);
3534 if (j < 0 || !profitable_increment_p (j))
3535 return false;
3540 return true;
3543 /* Return TRUE iff all required increments for candidates feeding PHI
3544 are profitable (and legal!) to replace on behalf of candidate C. */
3546 static bool
3547 all_phi_incrs_profitable (slsr_cand_t c, gphi *phi)
3549 int spread = 0;
3550 bool retval = all_phi_incrs_profitable_1 (c, phi, &spread);
3551 clear_visited (phi);
3552 return retval;
3555 /* Create a NOP_EXPR that copies FROM_EXPR into a new SSA name of
3556 type TO_TYPE, and insert it in front of the statement represented
3557 by candidate C. A new temporary SSA name is created for the result.
3558 Return the new SSA name. */
3560 static tree
3561 introduce_cast_before_cand (slsr_cand_t c, tree to_type, tree from_expr)
3563 tree cast_lhs;
3564 gassign *cast_stmt;
3565 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
3567 cast_lhs = make_temp_ssa_name (to_type, NULL, "slsr");
3568 cast_stmt = gimple_build_assign (cast_lhs, NOP_EXPR, from_expr);
3569 gimple_set_location (cast_stmt, gimple_location (c->cand_stmt));
3570 gsi_insert_before (&gsi, cast_stmt, GSI_SAME_STMT);
3572 if (dump_file && (dump_flags & TDF_DETAILS))
3574 fputs (" Inserting: ", dump_file);
3575 print_gimple_stmt (dump_file, cast_stmt, 0);
3578 return cast_lhs;
3581 /* Replace the RHS of the statement represented by candidate C with
3582 NEW_CODE, NEW_RHS1, and NEW_RHS2, provided that to do so doesn't
3583 leave C unchanged or just interchange its operands. The original
3584 operation and operands are in OLD_CODE, OLD_RHS1, and OLD_RHS2.
3585 If the replacement was made and we are doing a details dump,
3586 return the revised statement, else NULL. */
3588 static gimple *
3589 replace_rhs_if_not_dup (enum tree_code new_code, tree new_rhs1, tree new_rhs2,
3590 enum tree_code old_code, tree old_rhs1, tree old_rhs2,
3591 slsr_cand_t c)
3593 if (new_code != old_code
3594 || ((!operand_equal_p (new_rhs1, old_rhs1, 0)
3595 || !operand_equal_p (new_rhs2, old_rhs2, 0))
3596 && (!operand_equal_p (new_rhs1, old_rhs2, 0)
3597 || !operand_equal_p (new_rhs2, old_rhs1, 0))))
3599 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
3600 slsr_cand_t cc = c;
3601 gimple_assign_set_rhs_with_ops (&gsi, new_code, new_rhs1, new_rhs2);
3602 update_stmt (gsi_stmt (gsi));
3603 c->cand_stmt = gsi_stmt (gsi);
3604 while (cc->next_interp)
3606 cc = lookup_cand (cc->next_interp);
3607 cc->cand_stmt = gsi_stmt (gsi);
3610 if (dump_file && (dump_flags & TDF_DETAILS))
3611 return gsi_stmt (gsi);
3614 else if (dump_file && (dump_flags & TDF_DETAILS))
3615 fputs (" (duplicate, not actually replacing)\n", dump_file);
3617 return NULL;
3620 /* Strength-reduce the statement represented by candidate C by replacing
3621 it with an equivalent addition or subtraction. I is the index into
3622 the increment vector identifying C's increment. A new temporary SSA
3623 name is created if a cast needs to be introduced. BASIS_NAME
3624 is the rhs1 to use in creating the add/subtract. */
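/* Sketch (hypothetical GIMPLE): if C's increment relative to its basis
   y_6 is 5, and the recorded initializer for 5 is slsr_9 = s_5 * 5, the
   candidate's statement is rewritten as

     x_8 = y_6 + slsr_9;

   Increments of 1, -1, and 0 instead add the stride, subtract the
   stride, or copy/cast the basis name, respectively.  */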
3626 static void
3627 replace_one_candidate (slsr_cand_t c, unsigned i, tree basis_name)
3629 gimple *stmt_to_print = NULL;
3630 tree orig_rhs1, orig_rhs2;
3631 tree rhs2;
3632 enum tree_code orig_code, repl_code;
3633 widest_int cand_incr;
3635 orig_code = gimple_assign_rhs_code (c->cand_stmt);
3636 orig_rhs1 = gimple_assign_rhs1 (c->cand_stmt);
3637 orig_rhs2 = gimple_assign_rhs2 (c->cand_stmt);
3638 cand_incr = cand_increment (c);
3640 if (dump_file && (dump_flags & TDF_DETAILS))
3642 fputs ("Replacing: ", dump_file);
3643 print_gimple_stmt (dump_file, c->cand_stmt, 0);
3644 stmt_to_print = c->cand_stmt;
3647 if (address_arithmetic_p)
3648 repl_code = POINTER_PLUS_EXPR;
3649 else
3650 repl_code = PLUS_EXPR;
3652 /* If the increment has an initializer T_0, replace the candidate
3653 statement with an add of the basis name and the initializer. */
3654 if (incr_vec[i].initializer)
3656 tree init_type = TREE_TYPE (incr_vec[i].initializer);
3657 tree orig_type = TREE_TYPE (orig_rhs2);
3659 if (types_compatible_p (orig_type, init_type))
3660 rhs2 = incr_vec[i].initializer;
3661 else
3662 rhs2 = introduce_cast_before_cand (c, orig_type,
3663 incr_vec[i].initializer);
3665 if (incr_vec[i].incr != cand_incr)
3667 gcc_assert (repl_code == PLUS_EXPR);
3668 repl_code = MINUS_EXPR;
3671 stmt_to_print = replace_rhs_if_not_dup (repl_code, basis_name, rhs2,
3672 orig_code, orig_rhs1, orig_rhs2,
3673 c);
3676 /* Otherwise, the increment is one of -1, 0, and 1. Replace
3677 with a subtract of the stride from the basis name, a copy
3678 from the basis name, or an add of the stride to the basis
3679 name, respectively. It may be necessary to introduce a
3680 cast (or reuse an existing cast). */
3681 else if (cand_incr == 1)
3683 tree stride_type = TREE_TYPE (c->stride);
3684 tree orig_type = TREE_TYPE (orig_rhs2);
3686 if (types_compatible_p (orig_type, stride_type))
3687 rhs2 = c->stride;
3688 else
3689 rhs2 = introduce_cast_before_cand (c, orig_type, c->stride);
3691 stmt_to_print = replace_rhs_if_not_dup (repl_code, basis_name, rhs2,
3692 orig_code, orig_rhs1, orig_rhs2,
3693 c);
3696 else if (cand_incr == -1)
3698 tree stride_type = TREE_TYPE (c->stride);
3699 tree orig_type = TREE_TYPE (orig_rhs2);
3700 gcc_assert (repl_code != POINTER_PLUS_EXPR);
3702 if (types_compatible_p (orig_type, stride_type))
3703 rhs2 = c->stride;
3704 else
3705 rhs2 = introduce_cast_before_cand (c, orig_type, c->stride);
3707 if (orig_code != MINUS_EXPR
3708 || !operand_equal_p (basis_name, orig_rhs1, 0)
3709 || !operand_equal_p (rhs2, orig_rhs2, 0))
3711 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
3712 slsr_cand_t cc = c;
3713 gimple_assign_set_rhs_with_ops (&gsi, MINUS_EXPR, basis_name, rhs2);
3714 update_stmt (gsi_stmt (gsi));
3715 c->cand_stmt = gsi_stmt (gsi);
3716 while (cc->next_interp)
3718 cc = lookup_cand (cc->next_interp);
3719 cc->cand_stmt = gsi_stmt (gsi);
3722 if (dump_file && (dump_flags & TDF_DETAILS))
3723 stmt_to_print = gsi_stmt (gsi);
3725 else if (dump_file && (dump_flags & TDF_DETAILS))
3726 fputs (" (duplicate, not actually replacing)\n", dump_file);
3729 else if (cand_incr == 0)
3731 tree lhs = gimple_assign_lhs (c->cand_stmt);
3732 tree lhs_type = TREE_TYPE (lhs);
3733 tree basis_type = TREE_TYPE (basis_name);
3735 if (types_compatible_p (lhs_type, basis_type))
3737 gassign *copy_stmt = gimple_build_assign (lhs, basis_name);
3738 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
3739 slsr_cand_t cc = c;
3740 gimple_set_location (copy_stmt, gimple_location (c->cand_stmt));
3741 gsi_replace (&gsi, copy_stmt, false);
3742 c->cand_stmt = copy_stmt;
3743 while (cc->next_interp)
3745 cc = lookup_cand (cc->next_interp);
3746 cc->cand_stmt = copy_stmt;
3749 if (dump_file && (dump_flags & TDF_DETAILS))
3750 stmt_to_print = copy_stmt;
3752 else
3754 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
3755 gassign *cast_stmt = gimple_build_assign (lhs, NOP_EXPR, basis_name);
3756 slsr_cand_t cc = c;
3757 gimple_set_location (cast_stmt, gimple_location (c->cand_stmt));
3758 gsi_replace (&gsi, cast_stmt, false);
3759 c->cand_stmt = cast_stmt;
3760 while (cc->next_interp)
3762 cc = lookup_cand (cc->next_interp);
3763 cc->cand_stmt = cast_stmt;
3766 if (dump_file && (dump_flags & TDF_DETAILS))
3767 stmt_to_print = cast_stmt;
3770 else
3771 gcc_unreachable ();
3773 if (dump_file && (dump_flags & TDF_DETAILS) && stmt_to_print)
3775 fputs ("With: ", dump_file);
3776 print_gimple_stmt (dump_file, stmt_to_print, 0);
3777 fputs ("\n", dump_file);
/* For each candidate in the tree rooted at C, replace it with
   an increment if such has been shown to be profitable.  */

static void
replace_profitable_candidates (slsr_cand_t c)
{
  if (!cand_already_replaced (c))
    {
      widest_int increment = cand_abs_increment (c);
      enum tree_code orig_code = gimple_assign_rhs_code (c->cand_stmt);
      int i;

      i = incr_vec_index (increment);

      /* Only process profitable increments.  Nothing useful can be done
	 to a cast or copy.  */
      if (i >= 0
	  && profitable_increment_p (i)
	  && orig_code != SSA_NAME
	  && !CONVERT_EXPR_CODE_P (orig_code))
	{
	  if (phi_dependent_cand_p (c))
	    {
	      gphi *phi = as_a <gphi *> (lookup_cand (c->def_phi)->cand_stmt);

	      if (all_phi_incrs_profitable (c, phi))
		{
		  /* Look up the LHS SSA name from C's basis.  This will be
		     the RHS1 of the adds we will introduce to create new
		     phi arguments.  */
		  slsr_cand_t basis = lookup_cand (c->basis);
		  tree basis_name = gimple_assign_lhs (basis->cand_stmt);

		  /* Create a new phi statement that will represent C's true
		     basis after the transformation is complete.  */
		  location_t loc = gimple_location (c->cand_stmt);
		  tree name = create_phi_basis (c, phi, basis_name,
						loc, UNKNOWN_STRIDE);

		  /* Replace C with an add of the new basis phi and the
		     increment.  */
		  replace_one_candidate (c, i, name);
		}
	    }
	  else
	    {
	      slsr_cand_t basis = lookup_cand (c->basis);
	      tree basis_name = gimple_assign_lhs (basis->cand_stmt);
	      replace_one_candidate (c, i, basis_name);
	    }
	}
    }

  if (c->sibling)
    replace_profitable_candidates (lookup_cand (c->sibling));

  if (c->dependent)
    replace_profitable_candidates (lookup_cand (c->dependent));
}

/* Analyze costs of related candidates in the candidate vector,
   and make beneficial replacements.  */

static void
analyze_candidates_and_replace (void)
{
  unsigned i;
  slsr_cand_t c;

  /* Each candidate that has a null basis and a non-null
     dependent is the root of a tree of related statements.
     Analyze each tree to determine a subset of those
     statements that can be replaced with maximum benefit.  */
  FOR_EACH_VEC_ELT (cand_vec, i, c)
    {
      slsr_cand_t first_dep;

      if (c->basis != 0 || c->dependent == 0)
	continue;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "\nProcessing dependency tree rooted at %d.\n",
		 c->cand_num);

      first_dep = lookup_cand (c->dependent);

      /* If this is a chain of CAND_REFs, unconditionally replace
	 each of them with a strength-reduced data reference.  */
      if (c->kind == CAND_REF)
	replace_refs (c);

      /* If the common stride of all related candidates is a known
	 constant, each candidate without a phi-dependence can be
	 profitably replaced.  Each replaces a multiply by a single
	 add, with the possibility that a feeding add also goes dead.
	 A candidate with a phi-dependence is replaced only if the
	 compensation code it requires is offset by the strength
	 reduction savings.  */
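      /* For instance (illustrative names), with a known stride of 4,
	 a1 = x * 4 followed by a2 = (x + 1) * 4 is rewritten so that
	 a2 = a1 + 4, and the add that computed x + 1 may become dead.  */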
      else if (TREE_CODE (c->stride) == INTEGER_CST)
	replace_uncond_cands_and_profitable_phis (first_dep);

      /* When the stride is an SSA name, it may still be profitable
	 to replace some or all of the dependent candidates, depending
	 on whether the introduced increments can be reused, or are
	 less expensive to calculate than the replaced statements.  */
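      /* For instance (illustrative names), given a1 = x * s,
	 a2 = (x + 3) * s, and a3 = (x + 6) * s, the increment 3 occurs
	 twice, so a single initializer T_0 = 3 * s can pay for itself:
	 a2 and a3 are each replaced by an add of T_0 to their bases.  */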
      else
	{
	  machine_mode mode;
	  bool speed;

	  /* Determine whether we'll be generating pointer arithmetic
	     when replacing candidates.  */
	  address_arithmetic_p = (c->kind == CAND_ADD
				  && POINTER_TYPE_P (c->cand_type));

	  /* If all candidates have already been replaced under other
	     interpretations, nothing remains to be done.  */
	  if (!count_candidates (c))
	    continue;

	  /* Construct an array of increments for this candidate chain.  */
	  incr_vec = XNEWVEC (incr_info, MAX_INCR_VEC_LEN);
	  incr_vec_len = 0;
	  record_increments (c);

	  /* Determine which increments are profitable to replace.  */
	  mode = TYPE_MODE (TREE_TYPE (gimple_assign_lhs (c->cand_stmt)));
	  speed = optimize_cands_for_speed_p (c);
	  analyze_increments (first_dep, mode, speed);

	  /* Insert initializers of the form T_0 = stride * increment
	     for use in profitable replacements.  */
	  insert_initializers (first_dep);
	  dump_incr_vec ();

	  /* Perform the replacements.  */
	  replace_profitable_candidates (first_dep);
	  free (incr_vec);
	}
    }

  /* For conditional candidates, we may have uncommitted insertions
     on edges to clean up.  */
  gsi_commit_edge_inserts ();
}

namespace {

const pass_data pass_data_strength_reduction =
{
  GIMPLE_PASS, /* type */
  "slsr", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_GIMPLE_SLSR, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_strength_reduction : public gimple_opt_pass
{
public:
  pass_strength_reduction (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_strength_reduction, ctxt)
  {}

  /* opt_pass methods: */
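  /* The gate tests flag_tree_slsr, set by the -ftree-slsr option.  */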
  virtual bool gate (function *) { return flag_tree_slsr; }
  virtual unsigned int execute (function *);

}; // class pass_strength_reduction

unsigned
pass_strength_reduction::execute (function *fun)
{
  /* Create the obstack where candidates will reside.  */
  gcc_obstack_init (&cand_obstack);

  /* Allocate the candidate vector.  */
  cand_vec.create (128);

  /* Allocate the mapping from statements to candidate indices.  */
  stmt_cand_map = new hash_map<gimple *, slsr_cand_t>;

  /* Create the obstack where candidate chains will reside.  */
  gcc_obstack_init (&chain_obstack);

  /* Allocate the mapping from base expressions to candidate chains.  */
  base_cand_map = new hash_table<cand_chain_hasher> (500);

  /* Allocate the mapping from bases to alternative bases.  */
  alt_base_map = new hash_map<tree, tree>;

  /* Initialize the loop optimizer.  We need to detect flow across
     back edges, and this gives us dominator information as well.  */
  loop_optimizer_init (AVOID_CFG_MODIFICATIONS);

  /* Walk the CFG in predominator order looking for strength reduction
     candidates.  */
  find_candidates_dom_walker (CDI_DOMINATORS)
    .walk (fun->cfg->x_entry_block_ptr);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_cand_vec ();
      dump_cand_chains ();
    }

  delete alt_base_map;
  free_affine_expand_cache (&name_expansions);

  /* Analyze costs and make appropriate replacements.  */
  analyze_candidates_and_replace ();

  loop_optimizer_finalize ();
  delete base_cand_map;
  base_cand_map = NULL;
  obstack_free (&chain_obstack, NULL);
  delete stmt_cand_map;
  cand_vec.release ();
  obstack_free (&cand_obstack, NULL);

  return 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_strength_reduction (gcc::context *ctxt)
{
  return new pass_strength_reduction (ctxt);
}