gcc/gimple-ssa-strength-reduction.c
1 /* Straight-line strength reduction.
2 Copyright (C) 2012-2014 Free Software Foundation, Inc.
3 Contributed by Bill Schmidt, IBM <wschmidt@linux.ibm.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 /* There are many algorithms for performing strength reduction on
22 loops. This is not one of them. IVOPTS handles strength reduction
23 of induction variables just fine. This pass is intended to pick
24 up the crumbs it leaves behind, by considering opportunities for
25 strength reduction along dominator paths.
27 Strength reduction addresses explicit multiplies, and certain
28 multiplies implicit in addressing expressions. It would also be
29 possible to apply strength reduction to divisions and modulos,
30 but such opportunities are relatively uncommon.
32 Strength reduction is also currently restricted to integer operations.
33 If desired, it could be extended to floating-point operations under
34 control of something like -funsafe-math-optimizations. */
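/* As an illustrative sketch (not part of the pass itself), the kind of
   straight-line opportunity targeted here looks like:

     a = x * 8;
     ...
     b = (x + 1) * 8;     <-- dominated by the first multiply

   which can be rewritten as:

     a = x * 8;
     ...
     b = a + 8;

   replacing the second multiply with a cheaper add.  IVOPTS would not
   catch this, because no loop induction variable is involved.  */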
36 #include "config.h"
37 #include "system.h"
38 #include "coretypes.h"
39 #include "tree.h"
40 #include "hash-map.h"
41 #include "hash-table.h"
42 #include "basic-block.h"
43 #include "tree-ssa-alias.h"
44 #include "internal-fn.h"
45 #include "gimple-expr.h"
46 #include "is-a.h"
47 #include "gimple.h"
48 #include "gimple-iterator.h"
49 #include "gimplify-me.h"
50 #include "stor-layout.h"
51 #include "expr.h"
52 #include "tree-pass.h"
53 #include "cfgloop.h"
54 #include "gimple-pretty-print.h"
55 #include "gimple-ssa.h"
56 #include "tree-cfg.h"
57 #include "tree-phinodes.h"
58 #include "ssa-iterators.h"
59 #include "stringpool.h"
60 #include "tree-ssanames.h"
61 #include "domwalk.h"
62 #include "expmed.h"
63 #include "params.h"
64 #include "tree-ssa-address.h"
65 #include "tree-affine.h"
66 #include "wide-int-print.h"
67 #include "builtins.h"
69 /* Information about a strength reduction candidate. Each statement
70 in the candidate table represents an expression of one of the
71 following forms (the special case of CAND_REF will be described
72 later):
74 (CAND_MULT) S1: X = (B + i) * S
75 (CAND_ADD) S1: X = B + (i * S)
77 Here X and B are SSA names, i is an integer constant, and S is
78 either an SSA name or a constant. We call B the "base," i the
79 "index", and S the "stride."
81 Any statement S0 that dominates S1 and is of the form:
83 (CAND_MULT) S0: Y = (B + i') * S
84 (CAND_ADD) S0: Y = B + (i' * S)
86 is called a "basis" for S1. In both cases, S1 may be replaced by
88 S1': X = Y + (i - i') * S,
90 where (i - i') * S is folded to the extent possible.
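   As a concrete (illustrative) instance of this replacement rule,

     S0:  Y = (B + 2) * S
     S1:  X = (B + 7) * S

   has i' = 2 and i = 7, so S1 may be replaced by

     S1': X = Y + 5 * S,

   where 5 * S folds to a constant whenever S is a known constant.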
92 All gimple statements are visited in dominator order, and each
93 statement that may contribute to one of the forms of S1 above is
94 given at least one entry in the candidate table. Such statements
95 include addition, pointer addition, subtraction, multiplication,
96 negation, copies, and nontrivial type casts. If a statement may
97 represent more than one expression of the forms of S1 above,
98 multiple "interpretations" are stored in the table and chained
99 together. Examples:
101 * An add of two SSA names may treat either operand as the base.
102 * A multiply of two SSA names, likewise.
103 * A copy or cast may be thought of as either a CAND_MULT with
104 i = 0 and S = 1, or as a CAND_ADD with i = 0 or S = 0.
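   For example (illustrative only), the statement

     X = a_1 + b_2

   gets two interpretations, chained together:

     CAND_ADD (B: a_1, i: 1, S: b_2)
     CAND_ADD (B: b_2, i: 1, S: a_1)

   so that a basis can be found through either operand.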
106 Candidate records are allocated from an obstack. They are addressed
107 both from a hash table keyed on S1, and from a vector of candidate
108 pointers arranged in predominator order.
110 Opportunity note
111 ----------------
112 Currently we don't recognize:
114 S0: Y = (S * i') - B
115 S1: X = (S * i) - B
117 as a strength reduction opportunity, even though this S1 would
118 also be replaceable by the S1' above. This can be added if it
119 comes up in practice.
121 Strength reduction in addressing
122 --------------------------------
123 There is another kind of candidate known as CAND_REF. A CAND_REF
124 describes a statement containing a memory reference having
125 complex addressing that might benefit from strength reduction.
126 Specifically, we are interested in references for which
127 get_inner_reference returns a base address, offset, and bitpos as
128 follows:
130 base: MEM_REF (T1, C1)
131 offset: MULT_EXPR (PLUS_EXPR (T2, C2), C3)
132 bitpos: C4 * BITS_PER_UNIT
134 Here T1 and T2 are arbitrary trees, and C1, C2, C3, C4 are
135 arbitrary integer constants. Note that C2 may be zero, in which
136 case the offset will be MULT_EXPR (T2, C3).
138 When this pattern is recognized, the original memory reference
139 can be replaced with:
141 MEM_REF (POINTER_PLUS_EXPR (T1, MULT_EXPR (T2, C3)),
142 C1 + (C2 * C3) + C4)
144 which distributes the multiply to allow constant folding. When
145 two or more addressing expressions can be represented by MEM_REFs
146 of this form, differing only in the constants C1, C2, and C4,
147 making this substitution produces more efficient addressing during
148 the RTL phases. When there are not at least two expressions with
149 the same values of T1, T2, and C3, there is nothing to be gained
150 by the replacement.
152 Strength reduction of CAND_REFs uses the same infrastructure as
153 that used by CAND_MULTs and CAND_ADDs. We record T1 in the base (B)
154 field, MULT_EXPR (T2, C3) in the stride (S) field, and
155 C1 + (C2 * C3) + C4 in the index (i) field. A basis for a CAND_REF
156 is thus another CAND_REF with the same B and S values. When at
157 least two CAND_REFs are chained together using the basis relation,
158 each of them is replaced as above, resulting in improved code
159 generation for addressing.
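   As a worked (illustrative) example, assuming a bitpos of zero, two
   references such as

     MEM_REF (T1, 4)  with offset MULT_EXPR (PLUS_EXPR (T2, 1), 8)
     MEM_REF (T1, 4)  with offset MULT_EXPR (PLUS_EXPR (T2, 3), 8)

   are both recorded with base T1, stride MULT_EXPR (T2, 8), and
   indices 4 + 1*8 = 12 and 4 + 3*8 = 28, respectively.  Because they
   share the same base and stride, the second has the first as its
   basis, and both are rewritten as

     MEM_REF (POINTER_PLUS_EXPR (T1, MULT_EXPR (T2, 8)), 12)
     MEM_REF (POINTER_PLUS_EXPR (T1, MULT_EXPR (T2, 8)), 28)

   exposing the common address computation to the RTL phases.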
161 Conditional candidates
162 ======================
164 Conditional candidates are best illustrated with an example.
165 Consider the code sequence:
167 (1) x_0 = ...;
168 (2) a_0 = x_0 * 5; MULT (B: x_0; i: 0; S: 5)
169 if (...)
170 (3) x_1 = x_0 + 1; ADD (B: x_0, i: 1; S: 1)
171 (4) x_2 = PHI <x_0, x_1>; PHI (B: x_0, i: 0, S: 1)
172 (5) x_3 = x_2 + 1; ADD (B: x_2, i: 1, S: 1)
173 (6) a_1 = x_3 * 5; MULT (B: x_2, i: 1; S: 5)
175 Here strength reduction is complicated by the uncertain value of x_2.
176 A legitimate transformation is:
178 (1) x_0 = ...;
179 (2) a_0 = x_0 * 5;
180 if (...)
182 (3) [x_1 = x_0 + 1;]
183 (3a) t_1 = a_0 + 5;
185 (4) [x_2 = PHI <x_0, x_1>;]
186 (4a) t_2 = PHI <a_0, t_1>;
187 (5) [x_3 = x_2 + 1;]
188 (6r) a_1 = t_2 + 5;
190 where the bracketed instructions may go dead.
192 To recognize this opportunity, we have to observe that statement (6)
193 has a "hidden basis" (2). The hidden basis is unlike a normal basis
194 in that the statement and the hidden basis have different base SSA
195 names (x_2 and x_0, respectively). The relationship is established
196 when a statement's base name (x_2) is defined by a phi statement (4),
197 each argument of which (x_0, x_1) has an identical "derived base name."
198 If the argument is defined by a candidate (as x_1 is by (3)) that is a
199 CAND_ADD having a stride of 1, the derived base name of the argument is
200 the base name of the candidate (x_0). Otherwise, the argument itself
201 is its derived base name (as is the case with argument x_0).
203 The hidden basis for statement (6) is the nearest dominating candidate
204 whose base name is the derived base name (x_0) of the feeding phi (4),
205 and whose stride is identical to that of the statement. We can then
206 create the new "phi basis" (4a) and feeding adds along incoming arcs (3a),
207 allowing the final replacement of (6) by the strength-reduced (6r).
209 To facilitate this, a new kind of candidate (CAND_PHI) is introduced.
210 A CAND_PHI is not a candidate for replacement, but is maintained in the
211 candidate table to ease discovery of hidden bases. Any phi statement
212 whose arguments share a common derived base name is entered into the
213 table with the derived base name, an (arbitrary) index of zero, and a
214 stride of 1. A statement with a hidden basis can then be detected by
215 simply looking up its feeding phi definition in the candidate table,
216 extracting the derived base name, and searching for a basis in the
217 usual manner after substituting the derived base name.
219 Note that the transformation is only valid when the original phi and
220 the statements that define the phi's arguments are all at the same
221 position in the loop hierarchy. */
224 /* Index into the candidate vector, offset by 1. VECs are zero-based,
225 while cand_idx's are one-based, with zero indicating null. */
226 typedef unsigned cand_idx;
228 /* The kind of candidate. */
229 enum cand_kind
231 CAND_MULT,
232 CAND_ADD,
233 CAND_REF,
234 CAND_PHI
237 struct slsr_cand_d
239 /* The candidate statement S1. */
240 gimple cand_stmt;
242 /* The base expression B: often an SSA name, but not always. */
243 tree base_expr;
245 /* The stride S. */
246 tree stride;
248 /* The index constant i. */
249 widest_int index;
251 /* The type of the candidate. This is normally the type of base_expr,
252 but casts may have occurred when combining feeding instructions.
253 A candidate can only be a basis for candidates of the same final type.
254 (For CAND_REFs, this is the type to be used for operand 1 of the
255 replacement MEM_REF.) */
256 tree cand_type;
258 /* The kind of candidate (CAND_MULT, etc.). */
259 enum cand_kind kind;
261 /* Index of this candidate in the candidate vector. */
262 cand_idx cand_num;
264 /* Index of the next candidate record for the same statement.
265 A statement may be useful in more than one way (e.g., due to
266 commutativity). So we can have multiple "interpretations"
267 of a statement. */
268 cand_idx next_interp;
270 /* Index of the basis statement S0, if any, in the candidate vector. */
271 cand_idx basis;
273 /* First candidate for which this candidate is a basis, if one exists. */
274 cand_idx dependent;
276 /* Next candidate having the same basis as this one. */
277 cand_idx sibling;
279 /* If this is a conditional candidate, the CAND_PHI candidate
280 that defines the base SSA name B. */
281 cand_idx def_phi;
283 /* Savings that can be expected from eliminating dead code if this
284 candidate is replaced. */
285 int dead_savings;
288 typedef struct slsr_cand_d slsr_cand, *slsr_cand_t;
289 typedef const struct slsr_cand_d *const_slsr_cand_t;
291 /* Pointers to candidates are chained together as part of a mapping
292 from base expressions to the candidates that use them. */
294 struct cand_chain_d
296 /* Base expression for the chain of candidates: often, but not
297 always, an SSA name. */
298 tree base_expr;
300 /* Pointer to a candidate. */
301 slsr_cand_t cand;
303 /* Chain pointer. */
304 struct cand_chain_d *next;
308 typedef struct cand_chain_d cand_chain, *cand_chain_t;
309 typedef const struct cand_chain_d *const_cand_chain_t;
311 /* Information about a unique "increment" associated with candidates
312 having an SSA name for a stride. An increment is the difference
313 between the index of the candidate and the index of its basis,
314 i.e., (i - i') as discussed in the module commentary.
316 When we are not going to generate address arithmetic we treat
317 increments that differ only in sign as the same, allowing sharing
318 of the cost of initializers. The absolute value of the increment
319 is stored in the incr_info. */
321 struct incr_info_d
323 /* The increment that relates a candidate to its basis. */
324 widest_int incr;
326 /* How many times the increment occurs in the candidate tree. */
327 unsigned count;
329 /* Cost of replacing candidates using this increment. Negative and
330 zero costs indicate replacement should be performed. */
331 int cost;
333 /* If this increment is profitable but is not -1, 0, or 1, it requires
334 an initializer T_0 = stride * incr to be found or introduced in the
335 nearest common dominator of all candidates. This field holds T_0
336 for subsequent use. */
337 tree initializer;
339 /* If the initializer was found to already exist, this is the block
340 where it was found. */
341 basic_block init_bb;
344 typedef struct incr_info_d incr_info, *incr_info_t;
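/* For example (an illustrative sketch): when address arithmetic will not
   be generated, candidates whose increments are +3 and -3 relative to
   their bases are treated as a single increment of 3, so that one
   initializer T_0 = 3 * S can be shared between them, the negative case
   subtracting T_0 where the positive case adds it.  */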
346 /* Candidates are maintained in a vector. If candidate X dominates
347 candidate Y, then X appears before Y in the vector; but the
348 converse does not necessarily hold. */
349 static vec<slsr_cand_t> cand_vec;
351 enum cost_consts
353 COST_NEUTRAL = 0,
354 COST_INFINITE = 1000
357 enum stride_status
359 UNKNOWN_STRIDE = 0,
360 KNOWN_STRIDE = 1
363 enum phi_adjust_status
365 NOT_PHI_ADJUST = 0,
366 PHI_ADJUST = 1
369 enum count_phis_status
371 DONT_COUNT_PHIS = 0,
372 COUNT_PHIS = 1
375 /* Pointer map embodying a mapping from statements to candidates. */
376 static hash_map<gimple, slsr_cand_t> *stmt_cand_map;
378 /* Obstack for candidates. */
379 static struct obstack cand_obstack;
381 /* Obstack for candidate chains. */
382 static struct obstack chain_obstack;
384 /* An array INCR_VEC of incr_infos is used during analysis of related
385 candidates having an SSA name for a stride. INCR_VEC_LEN describes
386 its current length. MAX_INCR_VEC_LEN is used to avoid costly
387 pathological cases. */
388 static incr_info_t incr_vec;
389 static unsigned incr_vec_len;
390 const int MAX_INCR_VEC_LEN = 16;
392 /* For a chain of candidates with unknown stride, indicates whether or not
393 we must generate pointer arithmetic when replacing statements. */
394 static bool address_arithmetic_p;
396 /* Forward function declarations. */
397 static slsr_cand_t base_cand_from_table (tree);
398 static tree introduce_cast_before_cand (slsr_cand_t, tree, tree);
399 static bool legal_cast_p_1 (tree, tree);
401 /* Produce a pointer to the IDX'th candidate in the candidate vector. */
403 static slsr_cand_t
404 lookup_cand (cand_idx idx)
406 return cand_vec[idx - 1];
409 /* Helper for hashing a candidate chain header. */
411 struct cand_chain_hasher : typed_noop_remove <cand_chain>
413 typedef cand_chain value_type;
414 typedef cand_chain compare_type;
415 static inline hashval_t hash (const value_type *);
416 static inline bool equal (const value_type *, const compare_type *);
419 inline hashval_t
420 cand_chain_hasher::hash (const value_type *p)
422 tree base_expr = p->base_expr;
423 return iterative_hash_expr (base_expr, 0);
426 inline bool
427 cand_chain_hasher::equal (const value_type *chain1, const compare_type *chain2)
429 return operand_equal_p (chain1->base_expr, chain2->base_expr, 0);
432 /* Hash table embodying a mapping from base exprs to chains of candidates. */
433 static hash_table<cand_chain_hasher> *base_cand_map;
435 /* Pointer map used by tree_to_aff_combination_expand. */
436 static hash_map<tree, name_expansion *> *name_expansions;
437 /* Pointer map embodying a mapping from bases to alternative bases. */
438 static hash_map<tree, tree> *alt_base_map;
 440 /* Given BASE, use the tree affine combination facilities to
441 find the underlying tree expression for BASE, with any
442 immediate offset excluded.
444 N.B. we should eliminate this backtracking with better forward
445 analysis in a future release. */
447 static tree
448 get_alternative_base (tree base)
450 tree *result = alt_base_map->get (base);
452 if (result == NULL)
454 tree expr;
455 aff_tree aff;
457 tree_to_aff_combination_expand (base, TREE_TYPE (base),
458 &aff, &name_expansions);
459 aff.offset = 0;
460 expr = aff_combination_to_tree (&aff);
462 gcc_assert (!alt_base_map->put (base, base == expr ? NULL : expr));
464 return expr == base ? NULL : expr;
467 return *result;
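/* For instance (illustrative only), given code like

     a2[i][j] = 1;
     a2[i + 20][j] = 2;

   the two recorded CAND_REF bases differ, but expanding each as an
   affine combination and dropping the immediate offset (here the
   20-row displacement) yields the same underlying expression, which
   get_alternative_base returns so that the second reference can still
   find a basis in the first.  */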
470 /* Look in the candidate table for a CAND_PHI that defines BASE and
471 return it if found; otherwise return NULL. */
473 static cand_idx
474 find_phi_def (tree base)
476 slsr_cand_t c;
478 if (TREE_CODE (base) != SSA_NAME)
479 return 0;
481 c = base_cand_from_table (base);
483 if (!c || c->kind != CAND_PHI)
484 return 0;
486 return c->cand_num;
489 /* Helper routine for find_basis_for_candidate. May be called twice:
490 once for the candidate's base expr, and optionally again either for
491 the candidate's phi definition or for a CAND_REF's alternative base
492 expression. */
494 static slsr_cand_t
495 find_basis_for_base_expr (slsr_cand_t c, tree base_expr)
497 cand_chain mapping_key;
498 cand_chain_t chain;
499 slsr_cand_t basis = NULL;
501 // Limit potential of N^2 behavior for long candidate chains.
502 int iters = 0;
503 int max_iters = PARAM_VALUE (PARAM_MAX_SLSR_CANDIDATE_SCAN);
505 mapping_key.base_expr = base_expr;
506 chain = base_cand_map->find (&mapping_key);
508 for (; chain && iters < max_iters; chain = chain->next, ++iters)
510 slsr_cand_t one_basis = chain->cand;
512 if (one_basis->kind != c->kind
513 || one_basis->cand_stmt == c->cand_stmt
514 || !operand_equal_p (one_basis->stride, c->stride, 0)
515 || !types_compatible_p (one_basis->cand_type, c->cand_type)
516 || !dominated_by_p (CDI_DOMINATORS,
517 gimple_bb (c->cand_stmt),
518 gimple_bb (one_basis->cand_stmt)))
519 continue;
521 if (!basis || basis->cand_num < one_basis->cand_num)
522 basis = one_basis;
525 return basis;
528 /* Use the base expr from candidate C to look for possible candidates
529 that can serve as a basis for C. Each potential basis must also
530 appear in a block that dominates the candidate statement and have
531 the same stride and type. If more than one possible basis exists,
532 the one with highest index in the vector is chosen; this will be
533 the most immediately dominating basis. */
535 static int
536 find_basis_for_candidate (slsr_cand_t c)
538 slsr_cand_t basis = find_basis_for_base_expr (c, c->base_expr);
540 /* If a candidate doesn't have a basis using its base expression,
541 it may have a basis hidden by one or more intervening phis. */
542 if (!basis && c->def_phi)
544 basic_block basis_bb, phi_bb;
545 slsr_cand_t phi_cand = lookup_cand (c->def_phi);
546 basis = find_basis_for_base_expr (c, phi_cand->base_expr);
548 if (basis)
550 /* A hidden basis must dominate the phi-definition of the
551 candidate's base name. */
552 phi_bb = gimple_bb (phi_cand->cand_stmt);
553 basis_bb = gimple_bb (basis->cand_stmt);
555 if (phi_bb == basis_bb
556 || !dominated_by_p (CDI_DOMINATORS, phi_bb, basis_bb))
558 basis = NULL;
559 c->basis = 0;
562 /* If we found a hidden basis, estimate additional dead-code
563 savings if the phi and its feeding statements can be removed. */
564 if (basis && has_single_use (gimple_phi_result (phi_cand->cand_stmt)))
565 c->dead_savings += phi_cand->dead_savings;
569 if (flag_expensive_optimizations && !basis && c->kind == CAND_REF)
571 tree alt_base_expr = get_alternative_base (c->base_expr);
572 if (alt_base_expr)
573 basis = find_basis_for_base_expr (c, alt_base_expr);
576 if (basis)
578 c->sibling = basis->dependent;
579 basis->dependent = c->cand_num;
580 return basis->cand_num;
583 return 0;
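/* An illustrative note on basis selection: if candidates 4 and 9 could
   both serve as a basis for candidate 12 (same base, stride, and type,
   each in a block dominating candidate 12), candidate 9 is chosen.
   Because the candidate vector is filled in dominator order, the higher
   candidate number corresponds to the nearer dominating statement.  */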
586 /* Record a mapping from BASE to C, indicating that C may potentially serve
587 as a basis using that base expression. BASE may be the same as
 588    C->BASE_EXPR; alternatively BASE can be a different tree that shares the
 589    underlying expression of C->BASE_EXPR.  */
591 static void
592 record_potential_basis (slsr_cand_t c, tree base)
594 cand_chain_t node;
595 cand_chain **slot;
597 gcc_assert (base);
599 node = (cand_chain_t) obstack_alloc (&chain_obstack, sizeof (cand_chain));
600 node->base_expr = base;
601 node->cand = c;
602 node->next = NULL;
603 slot = base_cand_map->find_slot (node, INSERT);
605 if (*slot)
607 cand_chain_t head = (cand_chain_t) (*slot);
608 node->next = head->next;
609 head->next = node;
611 else
612 *slot = node;
615 /* Allocate storage for a new candidate and initialize its fields.
616 Attempt to find a basis for the candidate.
618 For CAND_REF, an alternative base may also be recorded and used
619 to find a basis. This helps cases where the expression hidden
 620    behind BASE (which is usually an SSA_NAME) has an immediate offset,
621 e.g.
623 a2[i][j] = 1;
624 a2[i + 20][j] = 2; */
626 static slsr_cand_t
627 alloc_cand_and_find_basis (enum cand_kind kind, gimple gs, tree base,
628 const widest_int &index, tree stride, tree ctype,
629 unsigned savings)
631 slsr_cand_t c = (slsr_cand_t) obstack_alloc (&cand_obstack,
632 sizeof (slsr_cand));
633 c->cand_stmt = gs;
634 c->base_expr = base;
635 c->stride = stride;
636 c->index = index;
637 c->cand_type = ctype;
638 c->kind = kind;
639 c->cand_num = cand_vec.length () + 1;
640 c->next_interp = 0;
641 c->dependent = 0;
642 c->sibling = 0;
643 c->def_phi = kind == CAND_MULT ? find_phi_def (base) : 0;
644 c->dead_savings = savings;
646 cand_vec.safe_push (c);
648 if (kind == CAND_PHI)
649 c->basis = 0;
650 else
651 c->basis = find_basis_for_candidate (c);
653 record_potential_basis (c, base);
654 if (flag_expensive_optimizations && kind == CAND_REF)
656 tree alt_base = get_alternative_base (base);
657 if (alt_base)
658 record_potential_basis (c, alt_base);
661 return c;
664 /* Determine the target cost of statement GS when compiling according
665 to SPEED. */
667 static int
668 stmt_cost (gimple gs, bool speed)
670 tree lhs, rhs1, rhs2;
671 enum machine_mode lhs_mode;
673 gcc_assert (is_gimple_assign (gs));
674 lhs = gimple_assign_lhs (gs);
675 rhs1 = gimple_assign_rhs1 (gs);
676 lhs_mode = TYPE_MODE (TREE_TYPE (lhs));
678 switch (gimple_assign_rhs_code (gs))
680 case MULT_EXPR:
681 rhs2 = gimple_assign_rhs2 (gs);
683 if (tree_fits_shwi_p (rhs2))
684 return mult_by_coeff_cost (tree_to_shwi (rhs2), lhs_mode, speed);
686 gcc_assert (TREE_CODE (rhs1) != INTEGER_CST);
687 return mul_cost (speed, lhs_mode);
689 case PLUS_EXPR:
690 case POINTER_PLUS_EXPR:
691 case MINUS_EXPR:
692 return add_cost (speed, lhs_mode);
694 case NEGATE_EXPR:
695 return neg_cost (speed, lhs_mode);
697 case NOP_EXPR:
698 return convert_cost (lhs_mode, TYPE_MODE (TREE_TYPE (rhs1)), speed);
700 /* Note that we don't assign costs to copies that in most cases
701 will go away. */
702 default:
706 gcc_unreachable ();
707 return 0;
710 /* Look up the defining statement for BASE_IN and return a pointer
711 to its candidate in the candidate table, if any; otherwise NULL.
712 Only CAND_ADD and CAND_MULT candidates are returned. */
714 static slsr_cand_t
715 base_cand_from_table (tree base_in)
717 slsr_cand_t *result;
719 gimple def = SSA_NAME_DEF_STMT (base_in);
720 if (!def)
721 return (slsr_cand_t) NULL;
723 result = stmt_cand_map->get (def);
725 if (result && (*result)->kind != CAND_REF)
726 return *result;
728 return (slsr_cand_t) NULL;
731 /* Add an entry to the statement-to-candidate mapping. */
733 static void
734 add_cand_for_stmt (gimple gs, slsr_cand_t c)
736 gcc_assert (!stmt_cand_map->put (gs, c));
739 /* Given PHI which contains a phi statement, determine whether it
740 satisfies all the requirements of a phi candidate. If so, create
741 a candidate. Note that a CAND_PHI never has a basis itself, but
742 is used to help find a basis for subsequent candidates. */
744 static void
745 slsr_process_phi (gimple_phi phi, bool speed)
747 unsigned i;
748 tree arg0_base = NULL_TREE, base_type;
749 slsr_cand_t c;
750 struct loop *cand_loop = gimple_bb (phi)->loop_father;
751 unsigned savings = 0;
753 /* A CAND_PHI requires each of its arguments to have the same
754 derived base name. (See the module header commentary for a
755 definition of derived base names.) Furthermore, all feeding
756 definitions must be in the same position in the loop hierarchy
757 as PHI. */
759 for (i = 0; i < gimple_phi_num_args (phi); i++)
761 slsr_cand_t arg_cand;
762 tree arg = gimple_phi_arg_def (phi, i);
763 tree derived_base_name = NULL_TREE;
764 gimple arg_stmt = NULL;
765 basic_block arg_bb = NULL;
767 if (TREE_CODE (arg) != SSA_NAME)
768 return;
770 arg_cand = base_cand_from_table (arg);
772 if (arg_cand)
774 while (arg_cand->kind != CAND_ADD && arg_cand->kind != CAND_PHI)
776 if (!arg_cand->next_interp)
777 return;
779 arg_cand = lookup_cand (arg_cand->next_interp);
782 if (!integer_onep (arg_cand->stride))
783 return;
785 derived_base_name = arg_cand->base_expr;
786 arg_stmt = arg_cand->cand_stmt;
787 arg_bb = gimple_bb (arg_stmt);
789 /* Gather potential dead code savings if the phi statement
790 can be removed later on. */
791 if (has_single_use (arg))
793 if (gimple_code (arg_stmt) == GIMPLE_PHI)
794 savings += arg_cand->dead_savings;
795 else
796 savings += stmt_cost (arg_stmt, speed);
799 else
801 derived_base_name = arg;
803 if (SSA_NAME_IS_DEFAULT_DEF (arg))
804 arg_bb = single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun));
805 else
 806 	    arg_bb = gimple_bb (SSA_NAME_DEF_STMT (arg));
809 if (!arg_bb || arg_bb->loop_father != cand_loop)
810 return;
812 if (i == 0)
813 arg0_base = derived_base_name;
814 else if (!operand_equal_p (derived_base_name, arg0_base, 0))
815 return;
818 /* Create the candidate. "alloc_cand_and_find_basis" is named
819 misleadingly for this case, as no basis will be sought for a
820 CAND_PHI. */
821 base_type = TREE_TYPE (arg0_base);
823 c = alloc_cand_and_find_basis (CAND_PHI, phi, arg0_base,
824 0, integer_one_node, base_type, savings);
826 /* Add the candidate to the statement-candidate mapping. */
827 add_cand_for_stmt (phi, c);
830 /* Given PBASE which is a pointer to tree, look up the defining
831 statement for it and check whether the candidate is in the
832 form of:
834 X = B + (1 * S), S is integer constant
835 X = B + (i * S), S is integer one
 837    If so, set PBASE to the candidate's base_expr and return the
 838    value (i * S) as a widest_int.
 839    Otherwise, just return a widest_int of zero.  */
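/* For example (illustrative only), if the tree passed in PBASE is
   defined by

     T2 = T2' + 5;        <-- CAND_ADD (B: T2', i: 5, S: 1)

   then *PBASE is rewritten to T2' and 5 is returned, so that the caller
   (restructure_reference) can fold the constant into the index of the
   enclosing CAND_REF as C5 * C3.  */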
841 static widest_int
842 backtrace_base_for_ref (tree *pbase)
844 tree base_in = *pbase;
845 slsr_cand_t base_cand;
847 STRIP_NOPS (base_in);
849 /* Strip off widening conversion(s) to handle cases where
850 e.g. 'B' is widened from an 'int' in order to calculate
851 a 64-bit address. */
852 if (CONVERT_EXPR_P (base_in)
853 && legal_cast_p_1 (base_in, TREE_OPERAND (base_in, 0)))
854 base_in = get_unwidened (base_in, NULL_TREE);
856 if (TREE_CODE (base_in) != SSA_NAME)
857 return 0;
859 base_cand = base_cand_from_table (base_in);
861 while (base_cand && base_cand->kind != CAND_PHI)
863 if (base_cand->kind == CAND_ADD
864 && base_cand->index == 1
865 && TREE_CODE (base_cand->stride) == INTEGER_CST)
867 /* X = B + (1 * S), S is integer constant. */
868 *pbase = base_cand->base_expr;
869 return wi::to_widest (base_cand->stride);
871 else if (base_cand->kind == CAND_ADD
872 && TREE_CODE (base_cand->stride) == INTEGER_CST
873 && integer_onep (base_cand->stride))
875 /* X = B + (i * S), S is integer one. */
876 *pbase = base_cand->base_expr;
877 return base_cand->index;
880 if (base_cand->next_interp)
881 base_cand = lookup_cand (base_cand->next_interp);
882 else
883 base_cand = NULL;
886 return 0;
889 /* Look for the following pattern:
891 *PBASE: MEM_REF (T1, C1)
893 *POFFSET: MULT_EXPR (T2, C3) [C2 is zero]
895 MULT_EXPR (PLUS_EXPR (T2, C2), C3)
897 MULT_EXPR (MINUS_EXPR (T2, -C2), C3)
899 *PINDEX: C4 * BITS_PER_UNIT
901 If not present, leave the input values unchanged and return FALSE.
902 Otherwise, modify the input values as follows and return TRUE:
904 *PBASE: T1
905 *POFFSET: MULT_EXPR (T2, C3)
906 *PINDEX: C1 + (C2 * C3) + C4
908 When T2 is recorded by a CAND_ADD in the form of (T2' + C5), it
909 will be further restructured to:
911 *PBASE: T1
912 *POFFSET: MULT_EXPR (T2', C3)
913 *PINDEX: C1 + (C2 * C3) + C4 + (C5 * C3) */
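/* A small worked example (illustrative only): for the inputs

     *PBASE:   MEM_REF (T1, 8)                       C1 = 8
     *POFFSET: MULT_EXPR (PLUS_EXPR (T2, 2), 4)      C2 = 2, C3 = 4
     *PINDEX:  16                                    C4 = 16 / BITS_PER_UNIT = 2

   the outputs are T1, MULT_EXPR (T2, 4), and 8 + 2*4 + 2 = 18,
   assuming BITS_PER_UNIT == 8 and no CAND_ADD restructuring of T2
   (i.e. C5 = 0).  */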
915 static bool
916 restructure_reference (tree *pbase, tree *poffset, widest_int *pindex,
917 tree *ptype)
919 tree base = *pbase, offset = *poffset;
920 widest_int index = *pindex;
921 tree mult_op0, t1, t2, type;
922 widest_int c1, c2, c3, c4, c5;
924 if (!base
925 || !offset
926 || TREE_CODE (base) != MEM_REF
927 || TREE_CODE (offset) != MULT_EXPR
928 || TREE_CODE (TREE_OPERAND (offset, 1)) != INTEGER_CST
929 || wi::umod_floor (index, BITS_PER_UNIT) != 0)
930 return false;
932 t1 = TREE_OPERAND (base, 0);
933 c1 = widest_int::from (mem_ref_offset (base), SIGNED);
934 type = TREE_TYPE (TREE_OPERAND (base, 1));
936 mult_op0 = TREE_OPERAND (offset, 0);
937 c3 = wi::to_widest (TREE_OPERAND (offset, 1));
939 if (TREE_CODE (mult_op0) == PLUS_EXPR)
941 if (TREE_CODE (TREE_OPERAND (mult_op0, 1)) == INTEGER_CST)
943 t2 = TREE_OPERAND (mult_op0, 0);
944 c2 = wi::to_widest (TREE_OPERAND (mult_op0, 1));
946 else
947 return false;
949 else if (TREE_CODE (mult_op0) == MINUS_EXPR)
951 if (TREE_CODE (TREE_OPERAND (mult_op0, 1)) == INTEGER_CST)
953 t2 = TREE_OPERAND (mult_op0, 0);
954 c2 = -wi::to_widest (TREE_OPERAND (mult_op0, 1));
956 else
957 return false;
959 else
961 t2 = mult_op0;
962 c2 = 0;
965 c4 = wi::lrshift (index, LOG2_BITS_PER_UNIT);
966 c5 = backtrace_base_for_ref (&t2);
968 *pbase = t1;
969 *poffset = fold_build2 (MULT_EXPR, sizetype, fold_convert (sizetype, t2),
970 wide_int_to_tree (sizetype, c3));
971 *pindex = c1 + c2 * c3 + c4 + c5 * c3;
972 *ptype = type;
974 return true;
977 /* Given GS which contains a data reference, create a CAND_REF entry in
978 the candidate table and attempt to find a basis. */
980 static void
981 slsr_process_ref (gimple gs)
983 tree ref_expr, base, offset, type;
984 HOST_WIDE_INT bitsize, bitpos;
985 enum machine_mode mode;
986 int unsignedp, volatilep;
987 slsr_cand_t c;
989 if (gimple_vdef (gs))
990 ref_expr = gimple_assign_lhs (gs);
991 else
992 ref_expr = gimple_assign_rhs1 (gs);
994 if (!handled_component_p (ref_expr)
995 || TREE_CODE (ref_expr) == BIT_FIELD_REF
996 || (TREE_CODE (ref_expr) == COMPONENT_REF
997 && DECL_BIT_FIELD (TREE_OPERAND (ref_expr, 1))))
998 return;
1000 base = get_inner_reference (ref_expr, &bitsize, &bitpos, &offset, &mode,
1001 &unsignedp, &volatilep, false);
1002 widest_int index = bitpos;
1004 if (!restructure_reference (&base, &offset, &index, &type))
1005 return;
1007 c = alloc_cand_and_find_basis (CAND_REF, gs, base, index, offset,
1008 type, 0);
1010 /* Add the candidate to the statement-candidate mapping. */
1011 add_cand_for_stmt (gs, c);
1014 /* Create a candidate entry for a statement GS, where GS multiplies
1015 two SSA names BASE_IN and STRIDE_IN. Propagate any known information
1016 about the two SSA names into the new candidate. Return the new
1017 candidate. */
1019 static slsr_cand_t
1020 create_mul_ssa_cand (gimple gs, tree base_in, tree stride_in, bool speed)
1022 tree base = NULL_TREE, stride = NULL_TREE, ctype = NULL_TREE;
1023 widest_int index;
1024 unsigned savings = 0;
1025 slsr_cand_t c;
1026 slsr_cand_t base_cand = base_cand_from_table (base_in);
1028 /* Look at all interpretations of the base candidate, if necessary,
1029 to find information to propagate into this candidate. */
1030 while (base_cand && !base && base_cand->kind != CAND_PHI)
1033 if (base_cand->kind == CAND_MULT && integer_onep (base_cand->stride))
1035 /* Y = (B + i') * 1
1036 X = Y * Z
1037 ================
1038 X = (B + i') * Z */
1039 base = base_cand->base_expr;
1040 index = base_cand->index;
1041 stride = stride_in;
1042 ctype = base_cand->cand_type;
1043 if (has_single_use (base_in))
1044 savings = (base_cand->dead_savings
1045 + stmt_cost (base_cand->cand_stmt, speed));
1047 else if (base_cand->kind == CAND_ADD
1048 && TREE_CODE (base_cand->stride) == INTEGER_CST)
1050 /* Y = B + (i' * S), S constant
1051 X = Y * Z
1052 ============================
1053 X = B + ((i' * S) * Z) */
1054 base = base_cand->base_expr;
1055 index = base_cand->index * wi::to_widest (base_cand->stride);
1056 stride = stride_in;
1057 ctype = base_cand->cand_type;
1058 if (has_single_use (base_in))
1059 savings = (base_cand->dead_savings
1060 + stmt_cost (base_cand->cand_stmt, speed));
1063 if (base_cand->next_interp)
1064 base_cand = lookup_cand (base_cand->next_interp);
1065 else
1066 base_cand = NULL;
1069 if (!base)
1071 /* No interpretations had anything useful to propagate, so
1072 produce X = (Y + 0) * Z. */
1073 base = base_in;
1074 index = 0;
1075 stride = stride_in;
1076 ctype = TREE_TYPE (base_in);
1079 c = alloc_cand_and_find_basis (CAND_MULT, gs, base, index, stride,
1080 ctype, savings);
1081 return c;
1084 /* Create a candidate entry for a statement GS, where GS multiplies
1085 SSA name BASE_IN by constant STRIDE_IN. Propagate any known
1086 information about BASE_IN into the new candidate. Return the new
1087 candidate. */
1089 static slsr_cand_t
1090 create_mul_imm_cand (gimple gs, tree base_in, tree stride_in, bool speed)
1092 tree base = NULL_TREE, stride = NULL_TREE, ctype = NULL_TREE;
1093 widest_int index, temp;
1094 unsigned savings = 0;
1095 slsr_cand_t c;
1096 slsr_cand_t base_cand = base_cand_from_table (base_in);
1098 /* Look at all interpretations of the base candidate, if necessary,
1099 to find information to propagate into this candidate. */
1100 while (base_cand && !base && base_cand->kind != CAND_PHI)
1102 if (base_cand->kind == CAND_MULT
1103 && TREE_CODE (base_cand->stride) == INTEGER_CST)
1105 /* Y = (B + i') * S, S constant
1106 X = Y * c
1107 ============================
1108 X = (B + i') * (S * c) */
1109 temp = wi::to_widest (base_cand->stride) * wi::to_widest (stride_in);
1110 if (wi::fits_to_tree_p (temp, TREE_TYPE (stride_in)))
1112 base = base_cand->base_expr;
1113 index = base_cand->index;
1114 stride = wide_int_to_tree (TREE_TYPE (stride_in), temp);
1115 ctype = base_cand->cand_type;
1116 if (has_single_use (base_in))
1117 savings = (base_cand->dead_savings
1118 + stmt_cost (base_cand->cand_stmt, speed));
1121 else if (base_cand->kind == CAND_ADD && integer_onep (base_cand->stride))
1123 /* Y = B + (i' * 1)
1124 X = Y * c
1125 ===========================
1126 X = (B + i') * c */
1127 base = base_cand->base_expr;
1128 index = base_cand->index;
1129 stride = stride_in;
1130 ctype = base_cand->cand_type;
1131 if (has_single_use (base_in))
1132 savings = (base_cand->dead_savings
1133 + stmt_cost (base_cand->cand_stmt, speed));
1135 else if (base_cand->kind == CAND_ADD
1136 && base_cand->index == 1
1137 && TREE_CODE (base_cand->stride) == INTEGER_CST)
1139 /* Y = B + (1 * S), S constant
1140 X = Y * c
1141 ===========================
1142 X = (B + S) * c */
1143 base = base_cand->base_expr;
1144 index = wi::to_widest (base_cand->stride);
1145 stride = stride_in;
1146 ctype = base_cand->cand_type;
1147 if (has_single_use (base_in))
1148 savings = (base_cand->dead_savings
1149 + stmt_cost (base_cand->cand_stmt, speed));
1152 if (base_cand->next_interp)
1153 base_cand = lookup_cand (base_cand->next_interp);
1154 else
1155 base_cand = NULL;
1158 if (!base)
1160 /* No interpretations had anything useful to propagate, so
1161 produce X = (Y + 0) * c. */
1162 base = base_in;
1163 index = 0;
1164 stride = stride_in;
1165 ctype = TREE_TYPE (base_in);
1168 c = alloc_cand_and_find_basis (CAND_MULT, gs, base, index, stride,
1169 ctype, savings);
1170 return c;
1173 /* Given GS which is a multiply of scalar integers, make an appropriate
1174 entry in the candidate table. If this is a multiply of two SSA names,
1175 create two CAND_MULT interpretations and attempt to find a basis for
1176 each of them. Otherwise, create a single CAND_MULT and attempt to
1177 find a basis. */
1179 static void
1180 slsr_process_mul (gimple gs, tree rhs1, tree rhs2, bool speed)
1182 slsr_cand_t c, c2;
1184 /* If this is a multiply of an SSA name with itself, it is highly
1185 unlikely that we will get a strength reduction opportunity, so
1186 don't record it as a candidate. This simplifies the logic for
1187 finding a basis, so if this is removed that must be considered. */
1188 if (rhs1 == rhs2)
1189 return;
1191 if (TREE_CODE (rhs2) == SSA_NAME)
1193 /* Record an interpretation of this statement in the candidate table
1194 assuming RHS1 is the base expression and RHS2 is the stride. */
1195 c = create_mul_ssa_cand (gs, rhs1, rhs2, speed);
1197 /* Add the first interpretation to the statement-candidate mapping. */
1198 add_cand_for_stmt (gs, c);
1200 /* Record another interpretation of this statement assuming RHS1
1201 is the stride and RHS2 is the base expression. */
1202 c2 = create_mul_ssa_cand (gs, rhs2, rhs1, speed);
1203 c->next_interp = c2->cand_num;
1205 else
1207 /* Record an interpretation for the multiply-immediate. */
1208 c = create_mul_imm_cand (gs, rhs1, rhs2, speed);
1210 /* Add the interpretation to the statement-candidate mapping. */
1211 add_cand_for_stmt (gs, c);
1215 /* Create a candidate entry for a statement GS, where GS adds two
1216 SSA names BASE_IN and ADDEND_IN if SUBTRACT_P is false, and
1217 subtracts ADDEND_IN from BASE_IN otherwise. Propagate any known
1218 information about the two SSA names into the new candidate.
1219 Return the new candidate. */
1221 static slsr_cand_t
1222 create_add_ssa_cand (gimple gs, tree base_in, tree addend_in,
1223 bool subtract_p, bool speed)
1225 tree base = NULL_TREE, stride = NULL_TREE, ctype = NULL;
1226 widest_int index;
1227 unsigned savings = 0;
1228 slsr_cand_t c;
1229 slsr_cand_t base_cand = base_cand_from_table (base_in);
1230 slsr_cand_t addend_cand = base_cand_from_table (addend_in);
1232 /* The most useful transformation is a multiply-immediate feeding
1233 an add or subtract. Look for that first. */
1234 while (addend_cand && !base && addend_cand->kind != CAND_PHI)
1236 if (addend_cand->kind == CAND_MULT
1237 && addend_cand->index == 0
1238 && TREE_CODE (addend_cand->stride) == INTEGER_CST)
1240 /* Z = (B + 0) * S, S constant
1241 X = Y +/- Z
1242 ===========================
1243 X = Y + ((+/-1 * S) * B) */
1244 base = base_in;
1245 index = wi::to_widest (addend_cand->stride);
1246 if (subtract_p)
1247 index = -index;
1248 stride = addend_cand->base_expr;
1249 ctype = TREE_TYPE (base_in);
1250 if (has_single_use (addend_in))
1251 savings = (addend_cand->dead_savings
1252 + stmt_cost (addend_cand->cand_stmt, speed));
1255 if (addend_cand->next_interp)
1256 addend_cand = lookup_cand (addend_cand->next_interp);
1257 else
1258 addend_cand = NULL;
1261 while (base_cand && !base && base_cand->kind != CAND_PHI)
1263 if (base_cand->kind == CAND_ADD
1264 && (base_cand->index == 0
1265 || operand_equal_p (base_cand->stride,
1266 integer_zero_node, 0)))
1268 /* Y = B + (i' * S), i' * S = 0
1269 X = Y +/- Z
1270 ============================
1271 X = B + (+/-1 * Z) */
1272 base = base_cand->base_expr;
1273 index = subtract_p ? -1 : 1;
1274 stride = addend_in;
1275 ctype = base_cand->cand_type;
1276 if (has_single_use (base_in))
1277 savings = (base_cand->dead_savings
1278 + stmt_cost (base_cand->cand_stmt, speed));
1280 else if (subtract_p)
1282 slsr_cand_t subtrahend_cand = base_cand_from_table (addend_in);
1284 while (subtrahend_cand && !base && subtrahend_cand->kind != CAND_PHI)
1286 if (subtrahend_cand->kind == CAND_MULT
1287 && subtrahend_cand->index == 0
1288 && TREE_CODE (subtrahend_cand->stride) == INTEGER_CST)
1290 /* Z = (B + 0) * S, S constant
1291 X = Y - Z
1292 ===========================
1293 Value: X = Y + ((-1 * S) * B) */
1294 base = base_in;
1295 index = wi::to_widest (subtrahend_cand->stride);
1296 index = -index;
1297 stride = subtrahend_cand->base_expr;
1298 ctype = TREE_TYPE (base_in);
1299 if (has_single_use (addend_in))
1300 savings = (subtrahend_cand->dead_savings
1301 + stmt_cost (subtrahend_cand->cand_stmt, speed));
1304 if (subtrahend_cand->next_interp)
1305 subtrahend_cand = lookup_cand (subtrahend_cand->next_interp);
1306 else
1307 subtrahend_cand = NULL;
1311 if (base_cand->next_interp)
1312 base_cand = lookup_cand (base_cand->next_interp);
1313 else
1314 base_cand = NULL;
1317 if (!base)
1319 /* No interpretations had anything useful to propagate, so
1320 produce X = Y + (1 * Z). */
1321 base = base_in;
1322 index = subtract_p ? -1 : 1;
1323 stride = addend_in;
1324 ctype = TREE_TYPE (base_in);
1327 c = alloc_cand_and_find_basis (CAND_ADD, gs, base, index, stride,
1328 ctype, savings);
1329 return c;
1332 /* Create a candidate entry for a statement GS, where GS adds SSA
1333 name BASE_IN to constant INDEX_IN. Propagate any known information
1334 about BASE_IN into the new candidate. Return the new candidate. */
1336 static slsr_cand_t
1337 create_add_imm_cand (gimple gs, tree base_in, const widest_int &index_in,
1338 bool speed)
1340 enum cand_kind kind = CAND_ADD;
1341 tree base = NULL_TREE, stride = NULL_TREE, ctype = NULL_TREE;
1342 widest_int index, multiple;
1343 unsigned savings = 0;
1344 slsr_cand_t c;
1345 slsr_cand_t base_cand = base_cand_from_table (base_in);
1347 while (base_cand && !base && base_cand->kind != CAND_PHI)
1349 signop sign = TYPE_SIGN (TREE_TYPE (base_cand->stride));
1351 if (TREE_CODE (base_cand->stride) == INTEGER_CST
1352 && wi::multiple_of_p (index_in, wi::to_widest (base_cand->stride),
1353 sign, &multiple))
1355 /* Y = (B + i') * S, S constant, c = kS for some integer k
1356 X = Y + c
1357 ============================
1358 X = (B + (i'+ k)) * S
1360 Y = B + (i' * S), S constant, c = kS for some integer k
1361 X = Y + c
1362 ============================
1363 X = (B + (i'+ k)) * S */
1364 kind = base_cand->kind;
1365 base = base_cand->base_expr;
1366 index = base_cand->index + multiple;
1367 stride = base_cand->stride;
1368 ctype = base_cand->cand_type;
1369 if (has_single_use (base_in))
1370 savings = (base_cand->dead_savings
1371 + stmt_cost (base_cand->cand_stmt, speed));
1374 if (base_cand->next_interp)
1375 base_cand = lookup_cand (base_cand->next_interp);
1376 else
1377 base_cand = NULL;
1380 if (!base)
1382 /* No interpretations had anything useful to propagate, so
1383 produce X = Y + (c * 1). */
1384 kind = CAND_ADD;
1385 base = base_in;
1386 index = index_in;
1387 stride = integer_one_node;
1388 ctype = TREE_TYPE (base_in);
1391 c = alloc_cand_and_find_basis (kind, gs, base, index, stride,
1392 ctype, savings);
1393 return c;
1396 /* Given GS which is an add or subtract of scalar integers or pointers,
1397 make at least one appropriate entry in the candidate table. */
1399 static void
1400 slsr_process_add (gimple gs, tree rhs1, tree rhs2, bool speed)
1402 bool subtract_p = gimple_assign_rhs_code (gs) == MINUS_EXPR;
1403 slsr_cand_t c = NULL, c2;
1405 if (TREE_CODE (rhs2) == SSA_NAME)
1407 /* First record an interpretation assuming RHS1 is the base expression
1408 and RHS2 is the stride. But it doesn't make sense for the
1409 stride to be a pointer, so don't record a candidate in that case. */
1410 if (!POINTER_TYPE_P (TREE_TYPE (rhs2)))
1412 c = create_add_ssa_cand (gs, rhs1, rhs2, subtract_p, speed);
1414 /* Add the first interpretation to the statement-candidate
1415 mapping. */
1416 add_cand_for_stmt (gs, c);
1419 /* If the two RHS operands are identical, or this is a subtract,
1420 we're done. */
1421 if (operand_equal_p (rhs1, rhs2, 0) || subtract_p)
1422 return;
1424 /* Otherwise, record another interpretation assuming RHS2 is the
1425 base expression and RHS1 is the stride, again provided that the
1426 stride is not a pointer. */
1427 if (!POINTER_TYPE_P (TREE_TYPE (rhs1)))
1429 c2 = create_add_ssa_cand (gs, rhs2, rhs1, false, speed);
1430 if (c)
1431 c->next_interp = c2->cand_num;
1432 else
1433 add_cand_for_stmt (gs, c2);
1436 else
1438 /* Record an interpretation for the add-immediate. */
1439 widest_int index = wi::to_widest (rhs2);
1440 if (subtract_p)
1441 index = -index;
1443 c = create_add_imm_cand (gs, rhs1, index, speed);
1445 /* Add the interpretation to the statement-candidate mapping. */
1446 add_cand_for_stmt (gs, c);
1450 /* Given GS which is a negate of a scalar integer, make an appropriate
1451 entry in the candidate table. A negate is equivalent to a multiply
1452 by -1. */
1454 static void
1455 slsr_process_neg (gimple gs, tree rhs1, bool speed)
1457 /* Record a CAND_MULT interpretation for the multiply by -1. */
1458 slsr_cand_t c = create_mul_imm_cand (gs, rhs1, integer_minus_one_node, speed);
1460 /* Add the interpretation to the statement-candidate mapping. */
1461 add_cand_for_stmt (gs, c);
1464 /* Help function for legal_cast_p, operating on two trees. Checks
1465 whether it's allowable to cast from RHS to LHS. See legal_cast_p
1466 for more details. */
1468 static bool
1469 legal_cast_p_1 (tree lhs, tree rhs)
1471 tree lhs_type, rhs_type;
1472 unsigned lhs_size, rhs_size;
1473 bool lhs_wraps, rhs_wraps;
1475 lhs_type = TREE_TYPE (lhs);
1476 rhs_type = TREE_TYPE (rhs);
1477 lhs_size = TYPE_PRECISION (lhs_type);
1478 rhs_size = TYPE_PRECISION (rhs_type);
1479 lhs_wraps = TYPE_OVERFLOW_WRAPS (lhs_type);
1480 rhs_wraps = TYPE_OVERFLOW_WRAPS (rhs_type);
1482 if (lhs_size < rhs_size
1483 || (rhs_wraps && !lhs_wraps)
1484 || (rhs_wraps && lhs_wraps && rhs_size != lhs_size))
1485 return false;
1487 return true;
1490 /* Return TRUE if GS is a statement that defines an SSA name from
1491 a conversion and is legal for us to combine with an add and multiply
1492 in the candidate table. For example, suppose we have:
1494 A = B + i;
1495 C = (type) A;
1496 D = C * S;
1498 Without the type-cast, we would create a CAND_MULT for D with base B,
1499 index i, and stride S. We want to record this candidate only if it
1500    is equivalent to applying the type cast following the multiply:
1502 A = B + i;
1503 E = A * S;
1504 D = (type) E;
1506 We will record the type with the candidate for D. This allows us
1507 to use a similar previous candidate as a basis. If we have earlier seen
1509 A' = B + i';
1510 C' = (type) A';
1511 D' = C' * S;
1513 we can replace D with
1515 D = D' + (i - i') * S;
1517 But if moving the type-cast would change semantics, we mustn't do this.
1519 This is legitimate for casts from a non-wrapping integral type to
1520 any integral type of the same or larger size. It is not legitimate
1521 to convert a wrapping type to a non-wrapping type, or to a wrapping
1522 type of a different size. I.e., with a wrapping type, we must
1523 assume that the addition B + i could wrap, in which case performing
1524 the multiply before or after one of the "illegal" type casts will
1525 have different semantics. */
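/* A concrete (illustrative) instance of the restriction: given

     unsigned char a = b + 200;
     int c = (int) a;
     int d = c * 4;

   the 8-bit addition in the wrapping type may overflow, so hoisting the
   multiply above the widening cast (computing b * 4 + 800 in int) would
   not be equivalent to the original sequence; such a cast is rejected.
   Casting from a non-wrapping int to an integral type of the same or
   larger size, by contrast, preserves the value and is accepted.  */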
1527 static bool
1528 legal_cast_p (gimple gs, tree rhs)
1530 if (!is_gimple_assign (gs)
1531 || !CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (gs)))
1532 return false;
1534 return legal_cast_p_1 (gimple_assign_lhs (gs), rhs);
1537 /* Given GS which is a cast to a scalar integer type, determine whether
1538 the cast is legal for strength reduction. If so, make at least one
1539 appropriate entry in the candidate table. */
1541 static void
1542 slsr_process_cast (gimple gs, tree rhs1, bool speed)
1544 tree lhs, ctype;
1545 slsr_cand_t base_cand, c, c2;
1546 unsigned savings = 0;
1548 if (!legal_cast_p (gs, rhs1))
1549 return;
1551 lhs = gimple_assign_lhs (gs);
1552 base_cand = base_cand_from_table (rhs1);
1553 ctype = TREE_TYPE (lhs);
1555 if (base_cand && base_cand->kind != CAND_PHI)
1557 while (base_cand)
1559 /* Propagate all data from the base candidate except the type,
1560 which comes from the cast, and the base candidate's cast,
1561 which is no longer applicable. */
1562 if (has_single_use (rhs1))
1563 savings = (base_cand->dead_savings
1564 + stmt_cost (base_cand->cand_stmt, speed));
1566 c = alloc_cand_and_find_basis (base_cand->kind, gs,
1567 base_cand->base_expr,
1568 base_cand->index, base_cand->stride,
1569 ctype, savings);
1570 if (base_cand->next_interp)
1571 base_cand = lookup_cand (base_cand->next_interp);
1572 else
1573 base_cand = NULL;
1576 else
1578 /* If nothing is known about the RHS, create fresh CAND_ADD and
1579 CAND_MULT interpretations:
1581 X = Y + (0 * 1)
1582 X = (Y + 0) * 1
1584 The first of these is somewhat arbitrary, but the choice of
1585 1 for the stride simplifies the logic for propagating casts
1586 into their uses. */
1587 c = alloc_cand_and_find_basis (CAND_ADD, gs, rhs1,
1588 0, integer_one_node, ctype, 0);
1589 c2 = alloc_cand_and_find_basis (CAND_MULT, gs, rhs1,
1590 0, integer_one_node, ctype, 0);
1591 c->next_interp = c2->cand_num;
1594 /* Add the first (or only) interpretation to the statement-candidate
1595 mapping. */
1596 add_cand_for_stmt (gs, c);
1599 /* Given GS which is a copy of a scalar integer type, make at least one
1600 appropriate entry in the candidate table.
1602 This interface is included for completeness, but is unnecessary
1603 if this pass immediately follows a pass that performs copy
1604 propagation, such as DOM. */
1606 static void
1607 slsr_process_copy (gimple gs, tree rhs1, bool speed)
1609 slsr_cand_t base_cand, c, c2;
1610 unsigned savings = 0;
1612 base_cand = base_cand_from_table (rhs1);
1614 if (base_cand && base_cand->kind != CAND_PHI)
1616 while (base_cand)
1618 /* Propagate all data from the base candidate. */
1619 if (has_single_use (rhs1))
1620 savings = (base_cand->dead_savings
1621 + stmt_cost (base_cand->cand_stmt, speed));
1623 c = alloc_cand_and_find_basis (base_cand->kind, gs,
1624 base_cand->base_expr,
1625 base_cand->index, base_cand->stride,
1626 base_cand->cand_type, savings);
1627 if (base_cand->next_interp)
1628 base_cand = lookup_cand (base_cand->next_interp);
1629 else
1630 base_cand = NULL;
1633 else
1635 /* If nothing is known about the RHS, create fresh CAND_ADD and
1636 CAND_MULT interpretations:
1638 X = Y + (0 * 1)
1639 X = (Y + 0) * 1
1641 The first of these is somewhat arbitrary, but the choice of
1642 1 for the stride simplifies the logic for propagating casts
1643 into their uses. */
1644 c = alloc_cand_and_find_basis (CAND_ADD, gs, rhs1,
1645 0, integer_one_node, TREE_TYPE (rhs1), 0);
1646 c2 = alloc_cand_and_find_basis (CAND_MULT, gs, rhs1,
1647 0, integer_one_node, TREE_TYPE (rhs1), 0);
1648 c->next_interp = c2->cand_num;
1651 /* Add the first (or only) interpretation to the statement-candidate
1652 mapping. */
1653 add_cand_for_stmt (gs, c);
1656 class find_candidates_dom_walker : public dom_walker
1658 public:
1659 find_candidates_dom_walker (cdi_direction direction)
1660 : dom_walker (direction) {}
1661 virtual void before_dom_children (basic_block);
1664 /* Find strength-reduction candidates in block BB. */
1666 void
1667 find_candidates_dom_walker::before_dom_children (basic_block bb)
1669 bool speed = optimize_bb_for_speed_p (bb);
1671 for (gimple_phi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
1672 gsi_next (&gsi))
1673 slsr_process_phi (gsi.phi (), speed);
1675 for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
1676 gsi_next (&gsi))
1678 gimple gs = gsi_stmt (gsi);
1680 if (gimple_vuse (gs) && gimple_assign_single_p (gs))
1681 slsr_process_ref (gs);
1683 else if (is_gimple_assign (gs)
1684 && SCALAR_INT_MODE_P
1685 (TYPE_MODE (TREE_TYPE (gimple_assign_lhs (gs)))))
1687 tree rhs1 = NULL_TREE, rhs2 = NULL_TREE;
1689 switch (gimple_assign_rhs_code (gs))
1691 case MULT_EXPR:
1692 case PLUS_EXPR:
1693 rhs1 = gimple_assign_rhs1 (gs);
1694 rhs2 = gimple_assign_rhs2 (gs);
1695 /* Should never happen, but currently some buggy situations
1696 in earlier phases put constants in rhs1. */
1697 if (TREE_CODE (rhs1) != SSA_NAME)
1698 continue;
1699 break;
1701 /* Possible future opportunity: rhs1 of a ptr+ can be
1702 an ADDR_EXPR. */
1703 case POINTER_PLUS_EXPR:
1704 case MINUS_EXPR:
1705 rhs2 = gimple_assign_rhs2 (gs);
1706 /* Fall-through. */
1708 case NOP_EXPR:
1709 case MODIFY_EXPR:
1710 case NEGATE_EXPR:
1711 rhs1 = gimple_assign_rhs1 (gs);
1712 if (TREE_CODE (rhs1) != SSA_NAME)
1713 continue;
1714 break;
1716 default:
1720 switch (gimple_assign_rhs_code (gs))
1722 case MULT_EXPR:
1723 slsr_process_mul (gs, rhs1, rhs2, speed);
1724 break;
1726 case PLUS_EXPR:
1727 case POINTER_PLUS_EXPR:
1728 case MINUS_EXPR:
1729 slsr_process_add (gs, rhs1, rhs2, speed);
1730 break;
1732 case NEGATE_EXPR:
1733 slsr_process_neg (gs, rhs1, speed);
1734 break;
1736 case NOP_EXPR:
1737 slsr_process_cast (gs, rhs1, speed);
1738 break;
1740 case MODIFY_EXPR:
1741 slsr_process_copy (gs, rhs1, speed);
1742 break;
1744 default:
1751 /* Dump a candidate for debug. */
1753 static void
1754 dump_candidate (slsr_cand_t c)
1756 fprintf (dump_file, "%3d [%d] ", c->cand_num,
1757 gimple_bb (c->cand_stmt)->index);
1758 print_gimple_stmt (dump_file, c->cand_stmt, 0, 0);
1759 switch (c->kind)
1761 case CAND_MULT:
1762 fputs (" MULT : (", dump_file);
1763 print_generic_expr (dump_file, c->base_expr, 0);
1764 fputs (" + ", dump_file);
1765 print_decs (c->index, dump_file);
1766 fputs (") * ", dump_file);
1767 print_generic_expr (dump_file, c->stride, 0);
1768 fputs (" : ", dump_file);
1769 break;
1770 case CAND_ADD:
1771 fputs (" ADD : ", dump_file);
1772 print_generic_expr (dump_file, c->base_expr, 0);
1773 fputs (" + (", dump_file);
1774 print_decs (c->index, dump_file);
1775 fputs (" * ", dump_file);
1776 print_generic_expr (dump_file, c->stride, 0);
1777 fputs (") : ", dump_file);
1778 break;
1779 case CAND_REF:
1780 fputs (" REF : ", dump_file);
1781 print_generic_expr (dump_file, c->base_expr, 0);
1782 fputs (" + (", dump_file);
1783 print_generic_expr (dump_file, c->stride, 0);
1784 fputs (") + ", dump_file);
1785 print_decs (c->index, dump_file);
1786 fputs (" : ", dump_file);
1787 break;
1788 case CAND_PHI:
1789 fputs (" PHI : ", dump_file);
1790 print_generic_expr (dump_file, c->base_expr, 0);
1791 fputs (" + (unknown * ", dump_file);
1792 print_generic_expr (dump_file, c->stride, 0);
1793 fputs (") : ", dump_file);
1794 break;
1795 default:
1796 gcc_unreachable ();
1798 print_generic_expr (dump_file, c->cand_type, 0);
1799 fprintf (dump_file, "\n basis: %d dependent: %d sibling: %d\n",
1800 c->basis, c->dependent, c->sibling);
1801 fprintf (dump_file, " next-interp: %d dead-savings: %d\n",
1802 c->next_interp, c->dead_savings);
1803 if (c->def_phi)
1804 fprintf (dump_file, " phi: %d\n", c->def_phi);
1805 fputs ("\n", dump_file);
1808 /* Dump the candidate vector for debug. */
1810 static void
1811 dump_cand_vec (void)
1813 unsigned i;
1814 slsr_cand_t c;
1816 fprintf (dump_file, "\nStrength reduction candidate vector:\n\n");
1818 FOR_EACH_VEC_ELT (cand_vec, i, c)
1819 dump_candidate (c);
1822 /* Callback used to dump the candidate chains hash table. */
1825 ssa_base_cand_dump_callback (cand_chain **slot, void *ignored ATTRIBUTE_UNUSED)
1827 const_cand_chain_t chain = *slot;
1828 cand_chain_t p;
1830 print_generic_expr (dump_file, chain->base_expr, 0);
1831 fprintf (dump_file, " -> %d", chain->cand->cand_num);
1833 for (p = chain->next; p; p = p->next)
1834 fprintf (dump_file, " -> %d", p->cand->cand_num);
1836 fputs ("\n", dump_file);
1837 return 1;
1840 /* Dump the candidate chains. */
1842 static void
1843 dump_cand_chains (void)
1845 fprintf (dump_file, "\nStrength reduction candidate chains:\n\n");
1846 base_cand_map->traverse_noresize <void *, ssa_base_cand_dump_callback>
1847 (NULL);
1848 fputs ("\n", dump_file);
1851 /* Dump the increment vector for debug. */
1853 static void
1854 dump_incr_vec (void)
1856 if (dump_file && (dump_flags & TDF_DETAILS))
1858 unsigned i;
1860 fprintf (dump_file, "\nIncrement vector:\n\n");
1862 for (i = 0; i < incr_vec_len; i++)
1864 fprintf (dump_file, "%3d increment: ", i);
1865 print_decs (incr_vec[i].incr, dump_file);
1866 fprintf (dump_file, "\n count: %d", incr_vec[i].count);
1867 fprintf (dump_file, "\n cost: %d", incr_vec[i].cost);
1868 fputs ("\n initializer: ", dump_file);
1869 print_generic_expr (dump_file, incr_vec[i].initializer, 0);
1870 fputs ("\n\n", dump_file);
1875 /* Replace *EXPR in candidate C with an equivalent strength-reduced
1876 data reference. */
1878 static void
1879 replace_ref (tree *expr, slsr_cand_t c)
1881 tree add_expr, mem_ref, acc_type = TREE_TYPE (*expr);
1882 unsigned HOST_WIDE_INT misalign;
1883 unsigned align;
1885 /* Ensure the memory reference carries the minimum alignment
1886 requirement for the data type. See PR58041. */
1887 get_object_alignment_1 (*expr, &align, &misalign);
1888 if (misalign != 0)
1889 align = (misalign & -misalign);
1890 if (align < TYPE_ALIGN (acc_type))
1891 acc_type = build_aligned_type (acc_type, align);
1893 add_expr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (c->base_expr),
1894 c->base_expr, c->stride);
1895 mem_ref = fold_build2 (MEM_REF, acc_type, add_expr,
1896 wide_int_to_tree (c->cand_type, c->index));
1898 /* Gimplify the base addressing expression for the new MEM_REF tree. */
1899 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
1900 TREE_OPERAND (mem_ref, 0)
1901 = force_gimple_operand_gsi (&gsi, TREE_OPERAND (mem_ref, 0),
1902 /*simple_p=*/true, NULL,
1903 /*before=*/true, GSI_SAME_STMT);
1904 copy_ref_info (mem_ref, *expr);
1905 *expr = mem_ref;
1906 update_stmt (c->cand_stmt);
1909 /* Replace CAND_REF candidate C, each sibling of candidate C, and each
1910 dependent of candidate C with an equivalent strength-reduced data
1911 reference. */
1913 static void
1914 replace_refs (slsr_cand_t c)
1916 if (dump_file && (dump_flags & TDF_DETAILS))
1918 fputs ("Replacing reference: ", dump_file);
1919 print_gimple_stmt (dump_file, c->cand_stmt, 0, 0);
1922 if (gimple_vdef (c->cand_stmt))
1924 tree *lhs = gimple_assign_lhs_ptr (c->cand_stmt);
1925 replace_ref (lhs, c);
1927 else
1929 tree *rhs = gimple_assign_rhs1_ptr (c->cand_stmt);
1930 replace_ref (rhs, c);
1933 if (dump_file && (dump_flags & TDF_DETAILS))
1935 fputs ("With: ", dump_file);
1936 print_gimple_stmt (dump_file, c->cand_stmt, 0, 0);
1937 fputs ("\n", dump_file);
1940 if (c->sibling)
1941 replace_refs (lookup_cand (c->sibling));
1943 if (c->dependent)
1944 replace_refs (lookup_cand (c->dependent));
1947 /* Return TRUE if candidate C is dependent upon a PHI. */
1949 static bool
1950 phi_dependent_cand_p (slsr_cand_t c)
1952 /* A candidate is not necessarily dependent upon a PHI just because
1953 it has a phi definition for its base name. It may have a basis
1954 that relies upon the same phi definition, in which case the PHI
1955 is irrelevant to this candidate. */
1956 return (c->def_phi
1957 && c->basis
1958 && lookup_cand (c->basis)->def_phi != c->def_phi);
1961 /* Calculate the increment required for candidate C relative to
1962 its basis. */
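/* A hypothetical example (base, index, and stride values invented): if C
   represents X = (B + 7) * S and its basis represents Y = (B + 5) * S,
   the increment returned for C is 7 - 5 = 2, allowing C to be rewritten
   later as X = Y + 2 * S.  */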
1964 static widest_int
1965 cand_increment (slsr_cand_t c)
1967 slsr_cand_t basis;
1969 /* If the candidate doesn't have a basis, just return its own
1970 index. This is useful in record_increments to help us find
1971 an existing initializer. Also, if the candidate's basis is
1972 hidden by a phi, then its own index will be the increment
1973 from the newly introduced phi basis. */
1974 if (!c->basis || phi_dependent_cand_p (c))
1975 return c->index;
1977 basis = lookup_cand (c->basis);
1978 gcc_assert (operand_equal_p (c->base_expr, basis->base_expr, 0));
1979 return c->index - basis->index;
1982 /* Calculate the increment required for candidate C relative to
1983 its basis. If we aren't going to generate pointer arithmetic
1984 for this candidate, return the absolute value of that increment
1985 instead. */
1987 static inline widest_int
1988 cand_abs_increment (slsr_cand_t c)
1990 widest_int increment = cand_increment (c);
1992 if (!address_arithmetic_p && wi::neg_p (increment))
1993 increment = -increment;
1995 return increment;
1998 /* Return TRUE iff candidate C has already been replaced under
1999 another interpretation. */
2001 static inline bool
2002 cand_already_replaced (slsr_cand_t c)
2004 return (gimple_bb (c->cand_stmt) == 0);
2007 /* Common logic used by replace_unconditional_candidate and
2008 replace_conditional_candidate. */
2010 static void
2011 replace_mult_candidate (slsr_cand_t c, tree basis_name, widest_int bump)
2013 tree target_type = TREE_TYPE (gimple_assign_lhs (c->cand_stmt));
2014 enum tree_code cand_code = gimple_assign_rhs_code (c->cand_stmt);
2016 /* It is highly unlikely, but possible, that the resulting
2017 bump doesn't fit in a HWI. Abandon the replacement
2018 in this case. This does not affect siblings or dependents
2019 of C. Restriction to signed HWI is conservative for unsigned
2020 types but allows for safe negation without twisted logic. */
2021 if (wi::fits_shwi_p (bump)
2022 && bump.to_shwi () != HOST_WIDE_INT_MIN
2023 /* It is not useful to replace casts, copies, or adds of
2024 an SSA name and a constant. */
2025 && cand_code != MODIFY_EXPR
2026 && cand_code != NOP_EXPR
2027 && cand_code != PLUS_EXPR
2028 && cand_code != POINTER_PLUS_EXPR
2029 && cand_code != MINUS_EXPR)
2031 enum tree_code code = PLUS_EXPR;
2032 tree bump_tree;
2033 gimple stmt_to_print = NULL;
2035 /* If the basis name and the candidate's LHS have incompatible
2036 types, introduce a cast. */
2037 if (!useless_type_conversion_p (target_type, TREE_TYPE (basis_name)))
2038 basis_name = introduce_cast_before_cand (c, target_type, basis_name);
2039 if (wi::neg_p (bump))
2041 code = MINUS_EXPR;
2042 bump = -bump;
2045 bump_tree = wide_int_to_tree (target_type, bump);
2047 if (dump_file && (dump_flags & TDF_DETAILS))
2049 fputs ("Replacing: ", dump_file);
2050 print_gimple_stmt (dump_file, c->cand_stmt, 0, 0);
2053 if (bump == 0)
2055 tree lhs = gimple_assign_lhs (c->cand_stmt);
2056 gimple_assign copy_stmt = gimple_build_assign (lhs, basis_name);
2057 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
2058 gimple_set_location (copy_stmt, gimple_location (c->cand_stmt));
2059 gsi_replace (&gsi, copy_stmt, false);
2060 c->cand_stmt = copy_stmt;
2061 if (dump_file && (dump_flags & TDF_DETAILS))
2062 stmt_to_print = copy_stmt;
2064 else
2066 tree rhs1, rhs2;
2067 if (cand_code != NEGATE_EXPR) {
2068 rhs1 = gimple_assign_rhs1 (c->cand_stmt);
2069 rhs2 = gimple_assign_rhs2 (c->cand_stmt);
2071 if (cand_code != NEGATE_EXPR
2072 && ((operand_equal_p (rhs1, basis_name, 0)
2073 && operand_equal_p (rhs2, bump_tree, 0))
2074 || (operand_equal_p (rhs1, bump_tree, 0)
2075 && operand_equal_p (rhs2, basis_name, 0))))
2077 if (dump_file && (dump_flags & TDF_DETAILS))
2079 fputs ("(duplicate, not actually replacing)", dump_file);
2080 stmt_to_print = c->cand_stmt;
2083 else
2085 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
2086 gimple_assign_set_rhs_with_ops (&gsi, code,
2087 basis_name, bump_tree);
2088 update_stmt (gsi_stmt (gsi));
2089 c->cand_stmt = gsi_stmt (gsi);
2090 if (dump_file && (dump_flags & TDF_DETAILS))
2091 stmt_to_print = gsi_stmt (gsi);
2095 if (dump_file && (dump_flags & TDF_DETAILS))
2097 fputs ("With: ", dump_file);
2098 print_gimple_stmt (dump_file, stmt_to_print, 0, 0);
2099 fputs ("\n", dump_file);
2104 /* Replace candidate C with an add or subtract. Note that we only
2105 operate on CAND_MULTs with known strides, so we will never generate
2106 a POINTER_PLUS_EXPR. Each candidate X = (B + i) * S is replaced by
2107 X = Y + ((i - i') * S), as described in the module commentary. The
2108 folded value ((i - i') * S) is referred to here as the "bump." */
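/* A hypothetical example of the bump (values invented): with known stride
   S = 4, a candidate X = (B + 7) * 4 whose basis is Y = (B + 5) * 4 has
   increment 7 - 5 = 2 and bump 2 * 4 = 8, so the original multiply is
   rewritten as the addition X = Y + 8.  */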
2110 static void
2111 replace_unconditional_candidate (slsr_cand_t c)
2113 slsr_cand_t basis;
2115 if (cand_already_replaced (c))
2116 return;
2118 basis = lookup_cand (c->basis);
2119 widest_int bump = cand_increment (c) * wi::to_widest (c->stride);
2121 replace_mult_candidate (c, gimple_assign_lhs (basis->cand_stmt), bump);
2124 /* Return the index in the increment vector of the given INCREMENT,
2125 or -1 if not found. The latter can occur if more than
2126 MAX_INCR_VEC_LEN increments have been found. */
2128 static inline int
2129 incr_vec_index (const widest_int &increment)
2131 unsigned i;
2133 for (i = 0; i < incr_vec_len && increment != incr_vec[i].incr; i++)
2134 ;
2136 if (i < incr_vec_len)
2137 return i;
2138 else
2139 return -1;
2142 /* Create a new statement along edge E to add BASIS_NAME to the product
2143 of INCREMENT and the stride of candidate C. Create and return a new
2144 SSA name to be used as the LHS of the new statement.
2145 KNOWN_STRIDE is true iff C's stride is a constant. */
2147 static tree
2148 create_add_on_incoming_edge (slsr_cand_t c, tree basis_name,
2149 widest_int increment, edge e, location_t loc,
2150 bool known_stride)
2152 basic_block insert_bb;
2153 gimple_stmt_iterator gsi;
2154 tree lhs, basis_type;
2155 gimple_assign new_stmt;
2157 /* If the add candidate along this incoming edge has the same
2158 index as C's hidden basis, the hidden basis represents this
2159 edge correctly. */
2160 if (increment == 0)
2161 return basis_name;
2163 basis_type = TREE_TYPE (basis_name);
2164 lhs = make_temp_ssa_name (basis_type, NULL, "slsr");
2166 if (known_stride)
2168 tree bump_tree;
2169 enum tree_code code = PLUS_EXPR;
2170 widest_int bump = increment * wi::to_widest (c->stride);
2171 if (wi::neg_p (bump))
2173 code = MINUS_EXPR;
2174 bump = -bump;
2177 bump_tree = wide_int_to_tree (basis_type, bump);
2178 new_stmt = gimple_build_assign_with_ops (code, lhs, basis_name,
2179 bump_tree);
2181 else
2183 int i;
2184 bool negate_incr = (!address_arithmetic_p && wi::neg_p (increment));
2185 i = incr_vec_index (negate_incr ? -increment : increment);
2186 gcc_assert (i >= 0);
2188 if (incr_vec[i].initializer)
2190 enum tree_code code = negate_incr ? MINUS_EXPR : PLUS_EXPR;
2191 new_stmt = gimple_build_assign_with_ops (code, lhs, basis_name,
2192 incr_vec[i].initializer);
2194 else if (increment == 1)
2195 new_stmt = gimple_build_assign_with_ops (PLUS_EXPR, lhs, basis_name,
2196 c->stride);
2197 else if (increment == -1)
2198 new_stmt = gimple_build_assign_with_ops (MINUS_EXPR, lhs, basis_name,
2199 c->stride);
2200 else
2201 gcc_unreachable ();
2204 insert_bb = single_succ_p (e->src) ? e->src : split_edge (e);
2205 gsi = gsi_last_bb (insert_bb);
2207 if (!gsi_end_p (gsi) && is_ctrl_stmt (gsi_stmt (gsi)))
2208 gsi_insert_before (&gsi, new_stmt, GSI_NEW_STMT);
2209 else
2210 gsi_insert_after (&gsi, new_stmt, GSI_NEW_STMT);
2212 gimple_set_location (new_stmt, loc);
2214 if (dump_file && (dump_flags & TDF_DETAILS))
2216 fprintf (dump_file, "Inserting in block %d: ", insert_bb->index);
2217 print_gimple_stmt (dump_file, new_stmt, 0, 0);
2220 return lhs;
2223 /* Given a candidate C with BASIS_NAME being the LHS of C's basis which
2224 is hidden by the phi node FROM_PHI, create a new phi node in the same
2225 block as FROM_PHI. The new phi is suitable for use as a basis by C,
2226 with its phi arguments representing conditional adjustments to the
2227 hidden basis along conditional incoming paths. Those adjustments are
2228 made by creating add statements (and sometimes recursively creating
2229 phis) along those incoming paths. LOC is the location to attach to
2230 the introduced statements. KNOWN_STRIDE is true iff C's stride is a
2231 constant. */
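/* A hypothetical illustration (SSA names invented): suppose the hidden
   basis computes a_0 = x_0 * 5, one conditional arm computes
   x_1 = x_0 + 1, the phi merges them as x_2 = PHI <x_0, x_1>, and the
   candidate to be replaced is a_1 = (x_2 + 1) * 5.  This function then
   builds, roughly,

     t_1 = a_0 + 5;              (on the edge carrying x_1)
     slsr_2 = PHI <a_0, t_1>;    (in the block of the original phi)

   and returns slsr_2, after which the candidate can be rewritten as
   a_1 = slsr_2 + 5.  */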
2233 static tree
2234 create_phi_basis (slsr_cand_t c, gimple from_phi, tree basis_name,
2235 location_t loc, bool known_stride)
2237 int i;
2238 tree name, phi_arg;
2239 gimple_phi phi;
2240 vec<tree> phi_args;
2241 slsr_cand_t basis = lookup_cand (c->basis);
2242 int nargs = gimple_phi_num_args (from_phi);
2243 basic_block phi_bb = gimple_bb (from_phi);
2244 slsr_cand_t phi_cand = base_cand_from_table (gimple_phi_result (from_phi));
2245 phi_args.create (nargs);
2247 /* Process each argument of the existing phi that represents
2248 conditionally-executed add candidates. */
2249 for (i = 0; i < nargs; i++)
2251 edge e = (*phi_bb->preds)[i];
2252 tree arg = gimple_phi_arg_def (from_phi, i);
2253 tree feeding_def;
2255 /* If the phi argument is the base name of the CAND_PHI, then
2256 this incoming arc should use the hidden basis. */
2257 if (operand_equal_p (arg, phi_cand->base_expr, 0))
2258 if (basis->index == 0)
2259 feeding_def = gimple_assign_lhs (basis->cand_stmt);
2260 else
2262 widest_int incr = -basis->index;
2263 feeding_def = create_add_on_incoming_edge (c, basis_name, incr,
2264 e, loc, known_stride);
2266 else
2268 gimple arg_def = SSA_NAME_DEF_STMT (arg);
2270 /* If there is another phi along this incoming edge, we must
2271 process it in the same fashion to ensure that all basis
2272 adjustments are made along its incoming edges. */
2273 if (gimple_code (arg_def) == GIMPLE_PHI)
2274 feeding_def = create_phi_basis (c, arg_def, basis_name,
2275 loc, known_stride);
2276 else
2278 slsr_cand_t arg_cand = base_cand_from_table (arg);
2279 widest_int diff = arg_cand->index - basis->index;
2280 feeding_def = create_add_on_incoming_edge (c, basis_name, diff,
2281 e, loc, known_stride);
2285 /* Because of recursion, we need to save the arguments in a vector
2286 so we can create the PHI statement all at once. Otherwise the
2287 storage for the half-created PHI can be reclaimed. */
2288 phi_args.safe_push (feeding_def);
2291 /* Create the new phi basis. */
2292 name = make_temp_ssa_name (TREE_TYPE (basis_name), NULL, "slsr");
2293 phi = create_phi_node (name, phi_bb);
2294 SSA_NAME_DEF_STMT (name) = phi;
2296 FOR_EACH_VEC_ELT (phi_args, i, phi_arg)
2298 edge e = (*phi_bb->preds)[i];
2299 add_phi_arg (phi, phi_arg, e, loc);
2302 update_stmt (phi);
2304 if (dump_file && (dump_flags & TDF_DETAILS))
2306 fputs ("Introducing new phi basis: ", dump_file);
2307 print_gimple_stmt (dump_file, phi, 0, 0);
2310 return name;
2313 /* Given a candidate C whose basis is hidden by at least one intervening
2314 phi, introduce a matching number of new phis to represent its basis
2315 adjusted by conditional increments along possible incoming paths. Then
2316 replace C as though it were an unconditional candidate, using the new
2317 basis. */
2319 static void
2320 replace_conditional_candidate (slsr_cand_t c)
2322 tree basis_name, name;
2323 slsr_cand_t basis;
2324 location_t loc;
2326 /* Look up the LHS SSA name from C's basis. This will be the
2327 RHS1 of the adds we will introduce to create new phi arguments. */
2328 basis = lookup_cand (c->basis);
2329 basis_name = gimple_assign_lhs (basis->cand_stmt);
2331 /* Create a new phi statement which will represent C's true basis
2332 after the transformation is complete. */
2333 loc = gimple_location (c->cand_stmt);
2334 name = create_phi_basis (c, lookup_cand (c->def_phi)->cand_stmt,
2335 basis_name, loc, KNOWN_STRIDE);
2336 /* Replace C with an add of the new basis phi and a constant. */
2337 widest_int bump = c->index * wi::to_widest (c->stride);
2339 replace_mult_candidate (c, name, bump);
2342 /* Compute the expected costs of inserting basis adjustments for
2343 candidate C with phi-definition PHI. The cost of inserting
2344 one adjustment is given by ONE_ADD_COST. If PHI has arguments
2345 which are themselves phi results, recursively calculate costs
2346 for those phis as well. */
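/* A made-up example: for a two-argument phi whose feeding add candidates
   have indices 1 and 0 while C itself has index 1, only the second
   argument needs a compensating add, so the estimated cost is
   ONE_ADD_COST.  If the phi lives in a block not dominated by the hidden
   basis, COST_INFINITE is returned instead to veto the replacement.  */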
2348 static int
2349 phi_add_costs (gimple phi, slsr_cand_t c, int one_add_cost)
2351 unsigned i;
2352 int cost = 0;
2353 slsr_cand_t phi_cand = base_cand_from_table (gimple_phi_result (phi));
2355 /* If we work our way back to a phi that isn't dominated by the hidden
2356 basis, this isn't a candidate for replacement. Indicate this by
2357 returning an unreasonably high cost. It's not easy to detect
2358 these situations when determining the basis, so we defer the
2359 decision until now. */
2360 basic_block phi_bb = gimple_bb (phi);
2361 slsr_cand_t basis = lookup_cand (c->basis);
2362 basic_block basis_bb = gimple_bb (basis->cand_stmt);
2364 if (phi_bb == basis_bb || !dominated_by_p (CDI_DOMINATORS, phi_bb, basis_bb))
2365 return COST_INFINITE;
2367 for (i = 0; i < gimple_phi_num_args (phi); i++)
2369 tree arg = gimple_phi_arg_def (phi, i);
2371 if (arg != phi_cand->base_expr)
2373 gimple arg_def = SSA_NAME_DEF_STMT (arg);
2375 if (gimple_code (arg_def) == GIMPLE_PHI)
2376 cost += phi_add_costs (arg_def, c, one_add_cost);
2377 else
2379 slsr_cand_t arg_cand = base_cand_from_table (arg);
2381 if (arg_cand->index != c->index)
2382 cost += one_add_cost;
2387 return cost;
2390 /* For candidate C, each sibling of candidate C, and each dependent of
2391 candidate C, determine whether the candidate is dependent upon a
2392 phi that hides its basis. If not, replace the candidate unconditionally.
2393 Otherwise, determine whether the cost of introducing compensation code
2394 for the candidate is offset by the gains from strength reduction. If
2395 so, replace the candidate and introduce the compensation code. */
2397 static void
2398 replace_uncond_cands_and_profitable_phis (slsr_cand_t c)
2400 if (phi_dependent_cand_p (c))
2402 if (c->kind == CAND_MULT)
2404 /* A candidate dependent upon a phi will replace a multiply by
2405 a constant with an add, and will insert at most one add for
2406 each phi argument. Add these costs with the potential dead-code
2407 savings to determine profitability. */
2408 bool speed = optimize_bb_for_speed_p (gimple_bb (c->cand_stmt));
2409 int mult_savings = stmt_cost (c->cand_stmt, speed);
2410 gimple phi = lookup_cand (c->def_phi)->cand_stmt;
2411 tree phi_result = gimple_phi_result (phi);
2412 int one_add_cost = add_cost (speed,
2413 TYPE_MODE (TREE_TYPE (phi_result)));
2414 int add_costs = one_add_cost + phi_add_costs (phi, c, one_add_cost);
2415 int cost = add_costs - mult_savings - c->dead_savings;
2417 if (dump_file && (dump_flags & TDF_DETAILS))
2419 fprintf (dump_file, " Conditional candidate %d:\n", c->cand_num);
2420 fprintf (dump_file, " add_costs = %d\n", add_costs);
2421 fprintf (dump_file, " mult_savings = %d\n", mult_savings);
2422 fprintf (dump_file, " dead_savings = %d\n", c->dead_savings);
2423 fprintf (dump_file, " cost = %d\n", cost);
2424 if (cost <= COST_NEUTRAL)
2425 fputs (" Replacing...\n", dump_file);
2426 else
2427 fputs (" Not replaced.\n", dump_file);
2430 if (cost <= COST_NEUTRAL)
2431 replace_conditional_candidate (c);
2434 else
2435 replace_unconditional_candidate (c);
2437 if (c->sibling)
2438 replace_uncond_cands_and_profitable_phis (lookup_cand (c->sibling));
2440 if (c->dependent)
2441 replace_uncond_cands_and_profitable_phis (lookup_cand (c->dependent));
2444 /* Count the number of candidates in the tree rooted at C that have
2445 not already been replaced under other interpretations. */
2447 static int
2448 count_candidates (slsr_cand_t c)
2450 unsigned count = cand_already_replaced (c) ? 0 : 1;
2452 if (c->sibling)
2453 count += count_candidates (lookup_cand (c->sibling));
2455 if (c->dependent)
2456 count += count_candidates (lookup_cand (c->dependent));
2458 return count;
2461 /* Increase the count of INCREMENT by one in the increment vector.
2462 INCREMENT is associated with candidate C. If INCREMENT is to be
2463 conditionally executed as part of a conditional candidate replacement,
2464 IS_PHI_ADJUST is true, otherwise false. If an initializer
2465 T_0 = stride * I is provided by a candidate that dominates all
2466 candidates with the same increment, also record T_0 for subsequent use. */
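/* A hypothetical example of recording an initializer (SSA names invented):
   if a candidate statement a_5 = x_3 + _4 was interpreted as
   "x_3 + (4 * s_1)" because of a feeding statement _4 = s_1 * 4, then _4
   already computes stride * 4.  It is recorded as the initializer for
   increment 4, provided its definition dominates the other candidates
   with that increment, so those candidates can reuse _4 rather than
   requiring a newly inserted multiply.  */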
2468 static void
2469 record_increment (slsr_cand_t c, widest_int increment, bool is_phi_adjust)
2471 bool found = false;
2472 unsigned i;
2474 /* Treat increments that differ only in sign as identical so as to
2475 share initializers, unless we are generating pointer arithmetic. */
2476 if (!address_arithmetic_p && wi::neg_p (increment))
2477 increment = -increment;
2479 for (i = 0; i < incr_vec_len; i++)
2481 if (incr_vec[i].incr == increment)
2483 incr_vec[i].count++;
2484 found = true;
2486 /* If we previously recorded an initializer that doesn't
2487 dominate this candidate, it's not going to be useful to
2488 us after all. */
2489 if (incr_vec[i].initializer
2490 && !dominated_by_p (CDI_DOMINATORS,
2491 gimple_bb (c->cand_stmt),
2492 incr_vec[i].init_bb))
2494 incr_vec[i].initializer = NULL_TREE;
2495 incr_vec[i].init_bb = NULL;
2498 break;
2502 if (!found && incr_vec_len < MAX_INCR_VEC_LEN - 1)
2504 /* The first time we see an increment, create the entry for it.
2505 If this is the root candidate which doesn't have a basis, set
2506 the count to zero. We're only processing it so it can possibly
2507 provide an initializer for other candidates. */
2508 incr_vec[incr_vec_len].incr = increment;
2509 incr_vec[incr_vec_len].count = c->basis || is_phi_adjust ? 1 : 0;
2510 incr_vec[incr_vec_len].cost = COST_INFINITE;
2512 /* Optimistically record the first occurrence of this increment
2513 as providing an initializer (if it does); we will revise this
2514 opinion later if it doesn't dominate all other occurrences.
2515 Exception: increments of -1, 0, 1 never need initializers;
2516 and phi adjustments don't ever provide initializers. */
2517 if (c->kind == CAND_ADD
2518 && !is_phi_adjust
2519 && c->index == increment
2520 && (wi::gts_p (increment, 1)
2521 || wi::lts_p (increment, -1))
2522 && (gimple_assign_rhs_code (c->cand_stmt) == PLUS_EXPR
2523 || gimple_assign_rhs_code (c->cand_stmt) == POINTER_PLUS_EXPR))
2525 tree t0 = NULL_TREE;
2526 tree rhs1 = gimple_assign_rhs1 (c->cand_stmt);
2527 tree rhs2 = gimple_assign_rhs2 (c->cand_stmt);
2528 if (operand_equal_p (rhs1, c->base_expr, 0))
2529 t0 = rhs2;
2530 else if (operand_equal_p (rhs2, c->base_expr, 0))
2531 t0 = rhs1;
2532 if (t0
2533 && SSA_NAME_DEF_STMT (t0)
2534 && gimple_bb (SSA_NAME_DEF_STMT (t0)))
2536 incr_vec[incr_vec_len].initializer = t0;
2537 incr_vec[incr_vec_len++].init_bb
2538 = gimple_bb (SSA_NAME_DEF_STMT (t0));
2540 else
2542 incr_vec[incr_vec_len].initializer = NULL_TREE;
2543 incr_vec[incr_vec_len++].init_bb = NULL;
2546 else
2548 incr_vec[incr_vec_len].initializer = NULL_TREE;
2549 incr_vec[incr_vec_len++].init_bb = NULL;
2554 /* Given phi statement PHI that hides a candidate from its BASIS, find
2555 the increments along each incoming arc (recursively handling additional
2556 phis that may be present) and record them. These increments are the
2557 difference in index between the index-adjusting statements and the
2558 index of the basis. */
2560 static void
2561 record_phi_increments (slsr_cand_t basis, gimple phi)
2563 unsigned i;
2564 slsr_cand_t phi_cand = base_cand_from_table (gimple_phi_result (phi));
2566 for (i = 0; i < gimple_phi_num_args (phi); i++)
2568 tree arg = gimple_phi_arg_def (phi, i);
2570 if (!operand_equal_p (arg, phi_cand->base_expr, 0))
2572 gimple arg_def = SSA_NAME_DEF_STMT (arg);
2574 if (gimple_code (arg_def) == GIMPLE_PHI)
2575 record_phi_increments (basis, arg_def);
2576 else
2578 slsr_cand_t arg_cand = base_cand_from_table (arg);
2579 widest_int diff = arg_cand->index - basis->index;
2580 record_increment (arg_cand, diff, PHI_ADJUST);
2586 /* Determine how many times each unique increment occurs in the set
2587 of candidates rooted at C's parent, recording the data in the
2588 increment vector. For each unique increment I, if an initializer
2589 T_0 = stride * I is provided by a candidate that dominates all
2590 candidates with the same increment, also record T_0 for subsequent
2591 use. */
2593 static void
2594 record_increments (slsr_cand_t c)
2596 if (!cand_already_replaced (c))
2598 if (!phi_dependent_cand_p (c))
2599 record_increment (c, cand_increment (c), NOT_PHI_ADJUST);
2600 else
2602 /* A candidate with a basis hidden by a phi will have one
2603 increment for its relationship to the index represented by
2604 the phi, and potentially additional increments along each
2605 incoming edge. For the root of the dependency tree (which
2606 has no basis), process just the initial index in case it has
2607 an initializer that can be used by subsequent candidates. */
2608 record_increment (c, c->index, NOT_PHI_ADJUST);
2610 if (c->basis)
2611 record_phi_increments (lookup_cand (c->basis),
2612 lookup_cand (c->def_phi)->cand_stmt);
2616 if (c->sibling)
2617 record_increments (lookup_cand (c->sibling));
2619 if (c->dependent)
2620 record_increments (lookup_cand (c->dependent));
2623 /* Add up and return the costs of introducing add statements that
2624 require the increment INCR on behalf of candidate C and phi
2625 statement PHI. Accumulate into *SAVINGS the potential savings
2626 from removing existing statements that feed PHI and have no other
2627 uses. */
2629 static int
2630 phi_incr_cost (slsr_cand_t c, const widest_int &incr, gimple phi, int *savings)
2632 unsigned i;
2633 int cost = 0;
2634 slsr_cand_t basis = lookup_cand (c->basis);
2635 slsr_cand_t phi_cand = base_cand_from_table (gimple_phi_result (phi));
2637 for (i = 0; i < gimple_phi_num_args (phi); i++)
2639 tree arg = gimple_phi_arg_def (phi, i);
2641 if (!operand_equal_p (arg, phi_cand->base_expr, 0))
2643 gimple arg_def = SSA_NAME_DEF_STMT (arg);
2645 if (gimple_code (arg_def) == GIMPLE_PHI)
2647 int feeding_savings = 0;
2648 cost += phi_incr_cost (c, incr, arg_def, &feeding_savings);
2649 if (has_single_use (gimple_phi_result (arg_def)))
2650 *savings += feeding_savings;
2652 else
2654 slsr_cand_t arg_cand = base_cand_from_table (arg);
2655 widest_int diff = arg_cand->index - basis->index;
2657 if (incr == diff)
2659 tree basis_lhs = gimple_assign_lhs (basis->cand_stmt);
2660 tree lhs = gimple_assign_lhs (arg_cand->cand_stmt);
2661 cost += add_cost (true, TYPE_MODE (TREE_TYPE (basis_lhs)));
2662 if (has_single_use (lhs))
2663 *savings += stmt_cost (arg_cand->cand_stmt, true);
2669 return cost;
2672 /* Return the first candidate in the tree rooted at C that has not
2673 already been replaced, favoring siblings over dependents. */
2675 static slsr_cand_t
2676 unreplaced_cand_in_tree (slsr_cand_t c)
2678 if (!cand_already_replaced (c))
2679 return c;
2681 if (c->sibling)
2683 slsr_cand_t sib = unreplaced_cand_in_tree (lookup_cand (c->sibling));
2684 if (sib)
2685 return sib;
2688 if (c->dependent)
2690 slsr_cand_t dep = unreplaced_cand_in_tree (lookup_cand (c->dependent));
2691 if (dep)
2692 return dep;
2695 return NULL;
2698 /* Return TRUE if the candidates in the tree rooted at C should be
2699 optimized for speed, else FALSE. We estimate this based on the block
2700 containing the most dominant candidate in the tree that has not yet
2701 been replaced. */
2703 static bool
2704 optimize_cands_for_speed_p (slsr_cand_t c)
2706 slsr_cand_t c2 = unreplaced_cand_in_tree (c);
2707 gcc_assert (c2);
2708 return optimize_bb_for_speed_p (gimple_bb (c2->cand_stmt));
2711 /* Add COST_IN to the lowest cost of any dependent path starting at
2712 candidate C or any of its siblings, counting only candidates along
2713 such paths with increment INCR. Assume that replacing a candidate
2714 reduces cost by REPL_SAVINGS. Also account for savings from any
2715 statements that would go dead. If COUNT_PHIS is true, include
2716 costs of introducing feeding statements for conditional candidates. */
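/* A made-up numerical example: let COST_IN be 4 (the cost of an
   initializer we would have to insert) and REPL_SAVINGS be 3 (multiply
   cost minus add cost).  If C and its single dependent both use increment
   INCR and have no dead-code savings, the path cost is 4 - 3 - 3 = -2,
   which is profitable; a sibling subtree in which no candidate uses INCR
   would contribute 4, and the MIN keeps -2.  */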
2718 static int
2719 lowest_cost_path (int cost_in, int repl_savings, slsr_cand_t c,
2720 const widest_int &incr, bool count_phis)
2722 int local_cost, sib_cost, savings = 0;
2723 widest_int cand_incr = cand_abs_increment (c);
2725 if (cand_already_replaced (c))
2726 local_cost = cost_in;
2727 else if (incr == cand_incr)
2728 local_cost = cost_in - repl_savings - c->dead_savings;
2729 else
2730 local_cost = cost_in - c->dead_savings;
2732 if (count_phis
2733 && phi_dependent_cand_p (c)
2734 && !cand_already_replaced (c))
2736 gimple phi = lookup_cand (c->def_phi)->cand_stmt;
2737 local_cost += phi_incr_cost (c, incr, phi, &savings);
2739 if (has_single_use (gimple_phi_result (phi)))
2740 local_cost -= savings;
2743 if (c->dependent)
2744 local_cost = lowest_cost_path (local_cost, repl_savings,
2745 lookup_cand (c->dependent), incr,
2746 count_phis);
2748 if (c->sibling)
2750 sib_cost = lowest_cost_path (cost_in, repl_savings,
2751 lookup_cand (c->sibling), incr,
2752 count_phis);
2753 local_cost = MIN (local_cost, sib_cost);
2756 return local_cost;
2759 /* Compute the total savings that would accrue from all replacements
2760 in the candidate tree rooted at C, counting only candidates with
2761 increment INCR. Assume that replacing a candidate reduces cost
2762 by REPL_SAVINGS. Also account for savings from statements that
2763 would go dead. */
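/* A made-up example: with REPL_SAVINGS of 3 and three not-yet-replaced
   candidates in the tree using increment INCR, none with dead-code
   savings, the total savings are 3 * 3 = 9, which analyze_increments then
   weighs against the one-time cost of computing the initializer when
   optimizing for size.  */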
2765 static int
2766 total_savings (int repl_savings, slsr_cand_t c, const widest_int &incr,
2767 bool count_phis)
2769 int savings = 0;
2770 widest_int cand_incr = cand_abs_increment (c);
2772 if (incr == cand_incr && !cand_already_replaced (c))
2773 savings += repl_savings + c->dead_savings;
2775 if (count_phis
2776 && phi_dependent_cand_p (c)
2777 && !cand_already_replaced (c))
2779 int phi_savings = 0;
2780 gimple phi = lookup_cand (c->def_phi)->cand_stmt;
2781 savings -= phi_incr_cost (c, incr, phi, &phi_savings);
2783 if (has_single_use (gimple_phi_result (phi)))
2784 savings += phi_savings;
2787 if (c->dependent)
2788 savings += total_savings (repl_savings, lookup_cand (c->dependent), incr,
2789 count_phis);
2791 if (c->sibling)
2792 savings += total_savings (repl_savings, lookup_cand (c->sibling), incr,
2793 count_phis);
2795 return savings;
2798 /* Use target-specific costs to determine and record which increments
2799 in the current candidate tree are profitable to replace, assuming
2800 MODE and SPEED. FIRST_DEP is the first dependent of the root of
2801 the candidate tree.
2803 One slight limitation here is that we don't account for the possible
2804 introduction of casts in some cases. See replace_one_candidate for
2805 the cases where these are introduced. This should probably be cleaned
2806 up sometime. */
2808 static void
2809 analyze_increments (slsr_cand_t first_dep, enum machine_mode mode, bool speed)
2811 unsigned i;
2813 for (i = 0; i < incr_vec_len; i++)
2815 HOST_WIDE_INT incr = incr_vec[i].incr.to_shwi ();
2817 /* If somehow this increment is bigger than a HWI, we won't
2818 be optimizing candidates that use it. And if the increment
2819 has a count of zero, nothing will be done with it. */
2820 if (!wi::fits_shwi_p (incr_vec[i].incr) || !incr_vec[i].count)
2821 incr_vec[i].cost = COST_INFINITE;
2823 /* Increments of 0, 1, and -1 are always profitable to replace,
2824 because they always replace a multiply or add with an add or
2825 copy, and may cause one or more existing instructions to go
2826 dead. Exception: -1 can't be assumed to be profitable for
2827 pointer addition. */
2828 else if (incr == 0
2829 || incr == 1
2830 || (incr == -1
2831 && (gimple_assign_rhs_code (first_dep->cand_stmt)
2832 != POINTER_PLUS_EXPR)))
2833 incr_vec[i].cost = COST_NEUTRAL;
2835 /* FORNOW: If we need to add an initializer, give up if a cast from
2836 the candidate's type to its stride's type can lose precision.
2837 This could eventually be handled better by expressly retaining the
2838 result of a cast to a wider type in the stride. Example:
2840 short int _1;
2841 _2 = (int) _1;
2842 _3 = _2 * 10;
2843 _4 = x + _3; ADD: x + (10 * _1) : int
2844 _5 = _2 * 15;
2845 _6 = x + _5; ADD: x + (15 * _1) : int
2847 Right now replacing _6 would cause insertion of an initializer
2848 of the form "short int T = _1 * 5;" followed by a cast to
2849 int, which could overflow incorrectly. Had we recorded _2 or
2850 (int)_1 as the stride, this wouldn't happen. However, doing
2851 this breaks other opportunities, so this will require some
2852 care. */
2853 else if (!incr_vec[i].initializer
2854 && TREE_CODE (first_dep->stride) != INTEGER_CST
2855 && !legal_cast_p_1 (first_dep->stride,
2856 gimple_assign_lhs (first_dep->cand_stmt)))
2858 incr_vec[i].cost = COST_INFINITE;
2860 /* If we need to add an initializer, make sure we don't introduce
2861 a multiply by a pointer type, which can happen in certain cast
2862 scenarios. FIXME: When cleaning up these cast issues, we can
2863 afford to introduce the multiply provided we cast out to an
2864 unsigned int of appropriate size. */
2865 else if (!incr_vec[i].initializer
2866 && TREE_CODE (first_dep->stride) != INTEGER_CST
2867 && POINTER_TYPE_P (TREE_TYPE (first_dep->stride)))
2869 incr_vec[i].cost = COST_INFINITE;
2871 /* For any other increment, if this is a multiply candidate, we
2872 must introduce a temporary T and initialize it with
2873 T_0 = stride * increment. When optimizing for speed, walk the
2874 candidate tree to calculate the best cost reduction along any
2875 path; if it offsets the fixed cost of inserting the initializer,
2876 replacing the increment is profitable. When optimizing for
2877 size, instead calculate the total cost reduction from replacing
2878 all candidates with this increment. */
2879 else if (first_dep->kind == CAND_MULT)
2881 int cost = mult_by_coeff_cost (incr, mode, speed);
2882 int repl_savings = mul_cost (speed, mode) - add_cost (speed, mode);
2883 if (speed)
2884 cost = lowest_cost_path (cost, repl_savings, first_dep,
2885 incr_vec[i].incr, COUNT_PHIS);
2886 else
2887 cost -= total_savings (repl_savings, first_dep, incr_vec[i].incr,
2888 COUNT_PHIS);
2890 incr_vec[i].cost = cost;
2893 /* If this is an add candidate, the initializer may already
2894 exist, so only calculate the cost of the initializer if it
2895 doesn't. We are replacing one add with another here, so the
2896 known replacement savings is zero. We will account for removal
2897 of dead instructions in lowest_cost_path or total_savings. */
2898 else
2900 int cost = 0;
2901 if (!incr_vec[i].initializer)
2902 cost = mult_by_coeff_cost (incr, mode, speed);
2904 if (speed)
2905 cost = lowest_cost_path (cost, 0, first_dep, incr_vec[i].incr,
2906 DONT_COUNT_PHIS);
2907 else
2908 cost -= total_savings (0, first_dep, incr_vec[i].incr,
2909 DONT_COUNT_PHIS);
2911 incr_vec[i].cost = cost;
2916 /* Return the nearest common dominator of BB1 and BB2. If the blocks
2917 are identical, return the earlier of C1 and C2 in *WHERE. Otherwise,
2918 if the NCD matches BB1, return C1 in *WHERE; if the NCD matches BB2,
2919 return C2 in *WHERE; and if the NCD matches neither, return NULL in
2920 *WHERE. Note: It is possible for one of C1 and C2 to be NULL. */
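/* For illustration (a hypothetical CFG): if BB1 dominates BB2, the
   nearest common dominator is BB1 itself and C1 is returned in *WHERE;
   if BB1 and BB2 are instead the two arms of a diamond, the NCD is the
   block at the top of the diamond, which holds neither candidate, and
   *WHERE is set to NULL.  */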
2922 static basic_block
2923 ncd_for_two_cands (basic_block bb1, basic_block bb2,
2924 slsr_cand_t c1, slsr_cand_t c2, slsr_cand_t *where)
2926 basic_block ncd;
2928 if (!bb1)
2930 *where = c2;
2931 return bb2;
2934 if (!bb2)
2936 *where = c1;
2937 return bb1;
2940 ncd = nearest_common_dominator (CDI_DOMINATORS, bb1, bb2);
2942 /* If both candidates are in the same block, the earlier
2943 candidate wins. */
2944 if (bb1 == ncd && bb2 == ncd)
2946 if (!c1 || (c2 && c2->cand_num < c1->cand_num))
2947 *where = c2;
2948 else
2949 *where = c1;
2952 /* Otherwise, if one of them produced a candidate in the
2953 dominator, that one wins. */
2954 else if (bb1 == ncd)
2955 *where = c1;
2957 else if (bb2 == ncd)
2958 *where = c2;
2960 /* If neither matches the dominator, neither wins. */
2961 else
2962 *where = NULL;
2964 return ncd;
2967 /* Consider all candidates that feed PHI. Find the nearest common
2968 dominator of those candidates requiring the given increment INCR.
2969 Further find and return the nearest common dominator of this result
2970 with block NCD. If the returned block contains one or more of the
2971 candidates, return the earliest candidate in the block in *WHERE. */
2973 static basic_block
2974 ncd_with_phi (slsr_cand_t c, const widest_int &incr, gimple_phi phi,
2975 basic_block ncd, slsr_cand_t *where)
2977 unsigned i;
2978 slsr_cand_t basis = lookup_cand (c->basis);
2979 slsr_cand_t phi_cand = base_cand_from_table (gimple_phi_result (phi));
2981 for (i = 0; i < gimple_phi_num_args (phi); i++)
2983 tree arg = gimple_phi_arg_def (phi, i);
2985 if (!operand_equal_p (arg, phi_cand->base_expr, 0))
2987 gimple arg_def = SSA_NAME_DEF_STMT (arg);
2989 if (gimple_code (arg_def) == GIMPLE_PHI)
2990 ncd = ncd_with_phi (c, incr, as_a <gimple_phi> (arg_def), ncd,
2991 where);
2992 else
2994 slsr_cand_t arg_cand = base_cand_from_table (arg);
2995 widest_int diff = arg_cand->index - basis->index;
2996 basic_block pred = gimple_phi_arg_edge (phi, i)->src;
2998 if ((incr == diff) || (!address_arithmetic_p && incr == -diff))
2999 ncd = ncd_for_two_cands (ncd, pred, *where, NULL, where);
3004 return ncd;
3007 /* Consider the candidate C together with any candidates that feed
3008 C's phi dependence (if any). Find and return the nearest common
3009 dominator of those candidates requiring the given increment INCR.
3010 If the returned block contains one or more of the candidates,
3011 return the earliest candidate in the block in *WHERE. */
3013 static basic_block
3014 ncd_of_cand_and_phis (slsr_cand_t c, const widest_int &incr, slsr_cand_t *where)
3016 basic_block ncd = NULL;
3018 if (cand_abs_increment (c) == incr)
3020 ncd = gimple_bb (c->cand_stmt);
3021 *where = c;
3024 if (phi_dependent_cand_p (c))
3025 ncd = ncd_with_phi (c, incr,
3026 as_a <gimple_phi> (lookup_cand (c->def_phi)->cand_stmt),
3027 ncd, where);
3029 return ncd;
3032 /* Consider all candidates in the tree rooted at C for which INCR
3033 represents the required increment of C relative to its basis.
3034 Find and return the basic block that most nearly dominates all
3035 such candidates. If the returned block contains one or more of
3036 the candidates, return the earliest candidate in the block in
3037 *WHERE. */
3039 static basic_block
3040 nearest_common_dominator_for_cands (slsr_cand_t c, const widest_int &incr,
3041 slsr_cand_t *where)
3043 basic_block sib_ncd = NULL, dep_ncd = NULL, this_ncd = NULL, ncd;
3044 slsr_cand_t sib_where = NULL, dep_where = NULL, this_where = NULL, new_where;
3046 /* First find the NCD of all siblings and dependents. */
3047 if (c->sibling)
3048 sib_ncd = nearest_common_dominator_for_cands (lookup_cand (c->sibling),
3049 incr, &sib_where);
3050 if (c->dependent)
3051 dep_ncd = nearest_common_dominator_for_cands (lookup_cand (c->dependent),
3052 incr, &dep_where);
3053 if (!sib_ncd && !dep_ncd)
3055 new_where = NULL;
3056 ncd = NULL;
3058 else if (sib_ncd && !dep_ncd)
3060 new_where = sib_where;
3061 ncd = sib_ncd;
3063 else if (dep_ncd && !sib_ncd)
3065 new_where = dep_where;
3066 ncd = dep_ncd;
3068 else
3069 ncd = ncd_for_two_cands (sib_ncd, dep_ncd, sib_where,
3070 dep_where, &new_where);
3072 /* If the candidate's increment doesn't match the one we're interested
3073 in (nor do any increments for feeding defs of a phi-dependence),
3074 then the result depends only on siblings and dependents. */
3075 this_ncd = ncd_of_cand_and_phis (c, incr, &this_where);
3077 if (!this_ncd || cand_already_replaced (c))
3079 *where = new_where;
3080 return ncd;
3083 /* Otherwise, compare this candidate with the result from all siblings
3084 and dependents. */
3085 ncd = ncd_for_two_cands (ncd, this_ncd, new_where, this_where, where);
3087 return ncd;
3090 /* Return TRUE if the increment indexed by INDEX is profitable to replace. */
3092 static inline bool
3093 profitable_increment_p (unsigned index)
3095 return (incr_vec[index].cost <= COST_NEUTRAL);
3098 /* For each profitable increment in the increment vector not equal to
3099 0 or 1 (or -1, for non-pointer arithmetic), find the nearest common
3100 dominator of all statements in the candidate chain rooted at C
3101 that require that increment, and insert an initializer
3102 T_0 = stride * increment at that location. Record T_0 with the
3103 increment record. */
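/* A hypothetical example (names and block numbers invented): if increment
   5 is profitable, the stride is s_2, and block 7 is the nearest common
   dominator of all candidates needing that increment, then a statement
   such as

     slsr_9 = s_2 * 5;

   is inserted in block 7 (before the earliest such candidate if one
   resides there, otherwise at the end of the block), and slsr_9 is
   recorded as the initializer for increment 5.  */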
3105 static void
3106 insert_initializers (slsr_cand_t c)
3108 unsigned i;
3110 for (i = 0; i < incr_vec_len; i++)
3112 basic_block bb;
3113 slsr_cand_t where = NULL;
3114 gimple_assign init_stmt;
3115 tree stride_type, new_name, incr_tree;
3116 widest_int incr = incr_vec[i].incr;
3118 if (!profitable_increment_p (i)
3119 || incr == 1
3120 || (incr == -1
3121 && gimple_assign_rhs_code (c->cand_stmt) != POINTER_PLUS_EXPR)
3122 || incr == 0)
3123 continue;
3125 /* We may have already identified an existing initializer that
3126 will suffice. */
3127 if (incr_vec[i].initializer)
3129 if (dump_file && (dump_flags & TDF_DETAILS))
3131 fputs ("Using existing initializer: ", dump_file);
3132 print_gimple_stmt (dump_file,
3133 SSA_NAME_DEF_STMT (incr_vec[i].initializer),
3134 0, 0);
3136 continue;
3139 /* Find the block that most closely dominates all candidates
3140 with this increment. If there is at least one candidate in
3141 that block, the earliest one will be returned in WHERE. */
3142 bb = nearest_common_dominator_for_cands (c, incr, &where);
3144 /* Create a new SSA name to hold the initializer's value. */
3145 stride_type = TREE_TYPE (c->stride);
3146 new_name = make_temp_ssa_name (stride_type, NULL, "slsr");
3147 incr_vec[i].initializer = new_name;
3149 /* Create the initializer and insert it in the latest possible
3150 dominating position. */
3151 incr_tree = wide_int_to_tree (stride_type, incr);
3152 init_stmt = gimple_build_assign_with_ops (MULT_EXPR, new_name,
3153 c->stride, incr_tree);
3154 if (where)
3156 gimple_stmt_iterator gsi = gsi_for_stmt (where->cand_stmt);
3157 gsi_insert_before (&gsi, init_stmt, GSI_SAME_STMT);
3158 gimple_set_location (init_stmt, gimple_location (where->cand_stmt));
3160 else
3162 gimple_stmt_iterator gsi = gsi_last_bb (bb);
3163 gimple basis_stmt = lookup_cand (c->basis)->cand_stmt;
3165 if (!gsi_end_p (gsi) && is_ctrl_stmt (gsi_stmt (gsi)))
3166 gsi_insert_before (&gsi, init_stmt, GSI_SAME_STMT);
3167 else
3168 gsi_insert_after (&gsi, init_stmt, GSI_SAME_STMT);
3170 gimple_set_location (init_stmt, gimple_location (basis_stmt));
3173 if (dump_file && (dump_flags & TDF_DETAILS))
3175 fputs ("Inserting initializer: ", dump_file);
3176 print_gimple_stmt (dump_file, init_stmt, 0, 0);
3181 /* Return TRUE iff all required increments for candidates feeding PHI
3182 are profitable to replace on behalf of candidate C. */
3184 static bool
3185 all_phi_incrs_profitable (slsr_cand_t c, gimple phi)
3187 unsigned i;
3188 slsr_cand_t basis = lookup_cand (c->basis);
3189 slsr_cand_t phi_cand = base_cand_from_table (gimple_phi_result (phi));
3191 for (i = 0; i < gimple_phi_num_args (phi); i++)
3193 tree arg = gimple_phi_arg_def (phi, i);
3195 if (!operand_equal_p (arg, phi_cand->base_expr, 0))
3197 gimple arg_def = SSA_NAME_DEF_STMT (arg);
3199 if (gimple_code (arg_def) == GIMPLE_PHI)
3201 if (!all_phi_incrs_profitable (c, arg_def))
3202 return false;
3204 else
3206 int j;
3207 slsr_cand_t arg_cand = base_cand_from_table (arg);
3208 widest_int increment = arg_cand->index - basis->index;
3210 if (!address_arithmetic_p && wi::neg_p (increment))
3211 increment = -increment;
3213 j = incr_vec_index (increment);
3215 if (dump_file && (dump_flags & TDF_DETAILS))
3217 fprintf (dump_file, " Conditional candidate %d, phi: ",
3218 c->cand_num);
3219 print_gimple_stmt (dump_file, phi, 0, 0);
3220 fputs (" increment: ", dump_file);
3221 print_decs (increment, dump_file);
3222 if (j < 0)
3223 fprintf (dump_file,
3224 "\n Not replaced; incr_vec overflow.\n");
3225 else {
3226 fprintf (dump_file, "\n cost: %d\n", incr_vec[j].cost);
3227 if (profitable_increment_p (j))
3228 fputs (" Replacing...\n", dump_file);
3229 else
3230 fputs (" Not replaced.\n", dump_file);
3234 if (j < 0 || !profitable_increment_p (j))
3235 return false;
3240 return true;
3243 /* Create a NOP_EXPR that copies FROM_EXPR into a new SSA name of
3244 type TO_TYPE, and insert it in front of the statement represented
3245 by candidate C. Return
3246 the new SSA name. */
3248 static tree
3249 introduce_cast_before_cand (slsr_cand_t c, tree to_type, tree from_expr)
3251 tree cast_lhs;
3252 gimple_assign cast_stmt;
3253 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
3255 cast_lhs = make_temp_ssa_name (to_type, NULL, "slsr");
3256 cast_stmt = gimple_build_assign_with_ops (NOP_EXPR, cast_lhs,
3257 from_expr, NULL_TREE);
3258 gimple_set_location (cast_stmt, gimple_location (c->cand_stmt));
3259 gsi_insert_before (&gsi, cast_stmt, GSI_SAME_STMT);
3261 if (dump_file && (dump_flags & TDF_DETAILS))
3263 fputs (" Inserting: ", dump_file);
3264 print_gimple_stmt (dump_file, cast_stmt, 0, 0);
3267 return cast_lhs;
3270 /* Replace the RHS of the statement represented by candidate C with
3271 NEW_CODE, NEW_RHS1, and NEW_RHS2, provided that to do so doesn't
3272 leave C unchanged or just interchange its operands. The original
3273 operation and operands are in OLD_CODE, OLD_RHS1, and OLD_RHS2.
3274 If the replacement was made and we are doing a details dump,
3275 return the revised statement, else NULL. */
3277 static gimple
3278 replace_rhs_if_not_dup (enum tree_code new_code, tree new_rhs1, tree new_rhs2,
3279 enum tree_code old_code, tree old_rhs1, tree old_rhs2,
3280 slsr_cand_t c)
3282 if (new_code != old_code
3283 || ((!operand_equal_p (new_rhs1, old_rhs1, 0)
3284 || !operand_equal_p (new_rhs2, old_rhs2, 0))
3285 && (!operand_equal_p (new_rhs1, old_rhs2, 0)
3286 || !operand_equal_p (new_rhs2, old_rhs1, 0))))
3288 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
3289 gimple_assign_set_rhs_with_ops (&gsi, new_code, new_rhs1, new_rhs2);
3290 update_stmt (gsi_stmt (gsi));
3291 c->cand_stmt = gsi_stmt (gsi);
3293 if (dump_file && (dump_flags & TDF_DETAILS))
3294 return gsi_stmt (gsi);
3297 else if (dump_file && (dump_flags & TDF_DETAILS))
3298 fputs (" (duplicate, not actually replacing)\n", dump_file);
3300 return NULL;
3303 /* Strength-reduce the statement represented by candidate C by replacing
3304 it with an equivalent addition or subtraction. I is the index into
3305 the increment vector identifying C's increment. BASIS_NAME
3306 is the rhs1 to use in creating the add/subtract. */
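/* A hypothetical example (SSA names invented): for a candidate recorded
   as "x_1 + (5 * s_2)" whose basis is y_3 = x_1 + (2 * s_2), the
   increment is 3.  If the initializer slsr_4 = s_2 * 3 is available, the
   candidate statement is rewritten as

     a_5 = y_3 + slsr_4;

   which is equivalent because x_1 + 5*s_2 == (x_1 + 2*s_2) + 3*s_2.  */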
3309 static void
3310 replace_one_candidate (slsr_cand_t c, unsigned i, tree basis_name)
3312 gimple stmt_to_print = NULL;
3313 tree orig_rhs1, orig_rhs2;
3314 tree rhs2;
3315 enum tree_code orig_code, repl_code;
3316 widest_int cand_incr;
3318 orig_code = gimple_assign_rhs_code (c->cand_stmt);
3319 orig_rhs1 = gimple_assign_rhs1 (c->cand_stmt);
3320 orig_rhs2 = gimple_assign_rhs2 (c->cand_stmt);
3321 cand_incr = cand_increment (c);
3323 if (dump_file && (dump_flags & TDF_DETAILS))
3325 fputs ("Replacing: ", dump_file);
3326 print_gimple_stmt (dump_file, c->cand_stmt, 0, 0);
3327 stmt_to_print = c->cand_stmt;
3330 if (address_arithmetic_p)
3331 repl_code = POINTER_PLUS_EXPR;
3332 else
3333 repl_code = PLUS_EXPR;
3335 /* If the increment has an initializer T_0, replace the candidate
3336 statement with an add of the basis name and the initializer. */
3337 if (incr_vec[i].initializer)
3339 tree init_type = TREE_TYPE (incr_vec[i].initializer);
3340 tree orig_type = TREE_TYPE (orig_rhs2);
3342 if (types_compatible_p (orig_type, init_type))
3343 rhs2 = incr_vec[i].initializer;
3344 else
3345 rhs2 = introduce_cast_before_cand (c, orig_type,
3346 incr_vec[i].initializer);
3348 if (incr_vec[i].incr != cand_incr)
3350 gcc_assert (repl_code == PLUS_EXPR);
3351 repl_code = MINUS_EXPR;
3354 stmt_to_print = replace_rhs_if_not_dup (repl_code, basis_name, rhs2,
3355 orig_code, orig_rhs1, orig_rhs2,
3356 c);
3359 /* Otherwise, the increment is one of -1, 0, and 1. Replace
3360 with a subtract of the stride from the basis name, a copy
3361 from the basis name, or an add of the stride to the basis
3362 name, respectively. It may be necessary to introduce a
3363 cast (or reuse an existing cast). */
3364 else if (cand_incr == 1)
3366 tree stride_type = TREE_TYPE (c->stride);
3367 tree orig_type = TREE_TYPE (orig_rhs2);
3369 if (types_compatible_p (orig_type, stride_type))
3370 rhs2 = c->stride;
3371 else
3372 rhs2 = introduce_cast_before_cand (c, orig_type, c->stride);
3374 stmt_to_print = replace_rhs_if_not_dup (repl_code, basis_name, rhs2,
3375 orig_code, orig_rhs1, orig_rhs2,
3376 c);
3379 else if (cand_incr == -1)
3381 tree stride_type = TREE_TYPE (c->stride);
3382 tree orig_type = TREE_TYPE (orig_rhs2);
3383 gcc_assert (repl_code != POINTER_PLUS_EXPR);
3385 if (types_compatible_p (orig_type, stride_type))
3386 rhs2 = c->stride;
3387 else
3388 rhs2 = introduce_cast_before_cand (c, orig_type, c->stride);
3390 if (orig_code != MINUS_EXPR
3391 || !operand_equal_p (basis_name, orig_rhs1, 0)
3392 || !operand_equal_p (rhs2, orig_rhs2, 0))
3394 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
3395 gimple_assign_set_rhs_with_ops (&gsi, MINUS_EXPR, basis_name, rhs2);
3396 update_stmt (gsi_stmt (gsi));
3397 c->cand_stmt = gsi_stmt (gsi);
3399 if (dump_file && (dump_flags & TDF_DETAILS))
3400 stmt_to_print = gsi_stmt (gsi);
3402 else if (dump_file && (dump_flags & TDF_DETAILS))
3403 fputs (" (duplicate, not actually replacing)\n", dump_file);
3406 else if (cand_incr == 0)
3408 tree lhs = gimple_assign_lhs (c->cand_stmt);
3409 tree lhs_type = TREE_TYPE (lhs);
3410 tree basis_type = TREE_TYPE (basis_name);
3412 if (types_compatible_p (lhs_type, basis_type))
3414 gimple_assign copy_stmt = gimple_build_assign (lhs, basis_name);
3415 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
3416 gimple_set_location (copy_stmt, gimple_location (c->cand_stmt));
3417 gsi_replace (&gsi, copy_stmt, false);
3418 c->cand_stmt = copy_stmt;
3420 if (dump_file && (dump_flags & TDF_DETAILS))
3421 stmt_to_print = copy_stmt;
3423 else
3425 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
3426 gimple_assign cast_stmt =
3427 gimple_build_assign_with_ops (NOP_EXPR, lhs,
3428 basis_name,
3429 NULL_TREE);
3430 gimple_set_location (cast_stmt, gimple_location (c->cand_stmt));
3431 gsi_replace (&gsi, cast_stmt, false);
3432 c->cand_stmt = cast_stmt;
3434 if (dump_file && (dump_flags & TDF_DETAILS))
3435 stmt_to_print = cast_stmt;
3438 else
3439 gcc_unreachable ();
3441 if (dump_file && (dump_flags & TDF_DETAILS) && stmt_to_print)
3443 fputs ("With: ", dump_file);
3444 print_gimple_stmt (dump_file, stmt_to_print, 0, 0);
3445 fputs ("\n", dump_file);
3449 /* For each candidate in the tree rooted at C, replace it with
3450 an increment if such has been shown to be profitable. */
3452 static void
3453 replace_profitable_candidates (slsr_cand_t c)
3455 if (!cand_already_replaced (c))
3457 widest_int increment = cand_abs_increment (c);
3458 enum tree_code orig_code = gimple_assign_rhs_code (c->cand_stmt);
3459 int i;
3461 i = incr_vec_index (increment);
3463 /* Only process profitable increments. Nothing useful can be done
3464 to a cast or copy. */
3465 if (i >= 0
3466 && profitable_increment_p (i)
3467 && orig_code != MODIFY_EXPR
3468 && orig_code != NOP_EXPR)
3470 if (phi_dependent_cand_p (c))
3472 gimple phi = lookup_cand (c->def_phi)->cand_stmt;
3474 if (all_phi_incrs_profitable (c, phi))
3476 /* Look up the LHS SSA name from C's basis. This will be
3477 the RHS1 of the adds we will introduce to create new
3478 phi arguments. */
3479 slsr_cand_t basis = lookup_cand (c->basis);
3480 tree basis_name = gimple_assign_lhs (basis->cand_stmt);
3482 /* Create a new phi statement that will represent C's true
3483 basis after the transformation is complete. */
3484 location_t loc = gimple_location (c->cand_stmt);
3485 tree name = create_phi_basis (c, phi, basis_name,
3486 loc, UNKNOWN_STRIDE);
3488 /* Replace C with an add of the new basis phi and the
3489 increment. */
3490 replace_one_candidate (c, i, name);
3493 else
3495 slsr_cand_t basis = lookup_cand (c->basis);
3496 tree basis_name = gimple_assign_lhs (basis->cand_stmt);
3497 replace_one_candidate (c, i, basis_name);
3502 if (c->sibling)
3503 replace_profitable_candidates (lookup_cand (c->sibling));
3505 if (c->dependent)
3506 replace_profitable_candidates (lookup_cand (c->dependent));
3509 /* Analyze costs of related candidates in the candidate vector,
3510 and make beneficial replacements. */
3512 static void
3513 analyze_candidates_and_replace (void)
3515 unsigned i;
3516 slsr_cand_t c;
3518 /* Each candidate that has a null basis and a non-null
3519 dependent is the root of a tree of related statements.
3520 Analyze each tree to determine a subset of those
3521 statements that can be replaced with maximum benefit. */
3522 FOR_EACH_VEC_ELT (cand_vec, i, c)
3524 slsr_cand_t first_dep;
3526 if (c->basis != 0 || c->dependent == 0)
3527 continue;
3529 if (dump_file && (dump_flags & TDF_DETAILS))
3530 fprintf (dump_file, "\nProcessing dependency tree rooted at %d.\n",
3531 c->cand_num);
3533 first_dep = lookup_cand (c->dependent);
3535 /* If this is a chain of CAND_REFs, unconditionally replace
3536 each of them with a strength-reduced data reference. */
3537 if (c->kind == CAND_REF)
3538 replace_refs (c);
3540 /* If the common stride of all related candidates is a known
3541 constant, each candidate without a phi-dependence can be
3542 profitably replaced. Each replaces a multiply by a single
3543 add, with the possibility that a feeding add also goes dead.
3544 A candidate with a phi-dependence is replaced only if the
3545 compensation code it requires is offset by the strength
3546 reduction savings. */
3547 else if (TREE_CODE (c->stride) == INTEGER_CST)
3548 replace_uncond_cands_and_profitable_phis (first_dep);
3550 /* When the stride is an SSA name, it may still be profitable
3551 to replace some or all of the dependent candidates, depending
3552 on whether the introduced increments can be reused, or are
3553 less expensive to calculate than the replaced statements. */
3554 else
3556 enum machine_mode mode;
3557 bool speed;
3559 /* Determine whether we'll be generating pointer arithmetic
3560 when replacing candidates. */
3561 address_arithmetic_p = (c->kind == CAND_ADD
3562 && POINTER_TYPE_P (c->cand_type));
3564 /* If all candidates have already been replaced under other
3565 interpretations, nothing remains to be done. */
3566 if (!count_candidates (c))
3567 continue;
3569 /* Construct an array of increments for this candidate chain. */
3570 incr_vec = XNEWVEC (incr_info, MAX_INCR_VEC_LEN);
3571 incr_vec_len = 0;
3572 record_increments (c);
3574 /* Determine which increments are profitable to replace. */
3575 mode = TYPE_MODE (TREE_TYPE (gimple_assign_lhs (c->cand_stmt)));
3576 speed = optimize_cands_for_speed_p (c);
3577 analyze_increments (first_dep, mode, speed);
3579 /* Insert initializers of the form T_0 = stride * increment
3580 for use in profitable replacements. */
3581 insert_initializers (first_dep);
3582 dump_incr_vec ();
3584 /* Perform the replacements. */
3585 replace_profitable_candidates (first_dep);
3586 free (incr_vec);
3591 namespace {
3593 const pass_data pass_data_strength_reduction =
3595 GIMPLE_PASS, /* type */
3596 "slsr", /* name */
3597 OPTGROUP_NONE, /* optinfo_flags */
3598 TV_GIMPLE_SLSR, /* tv_id */
3599 ( PROP_cfg | PROP_ssa ), /* properties_required */
3600 0, /* properties_provided */
3601 0, /* properties_destroyed */
3602 0, /* todo_flags_start */
3603 0, /* todo_flags_finish */
3606 class pass_strength_reduction : public gimple_opt_pass
3608 public:
3609 pass_strength_reduction (gcc::context *ctxt)
3610 : gimple_opt_pass (pass_data_strength_reduction, ctxt)
3613 /* opt_pass methods: */
3614 virtual bool gate (function *) { return flag_tree_slsr; }
3615 virtual unsigned int execute (function *);
3617 }; // class pass_strength_reduction
3619 unsigned
3620 pass_strength_reduction::execute (function *fun)
3622 /* Create the obstack where candidates will reside. */
3623 gcc_obstack_init (&cand_obstack);
3625 /* Allocate the candidate vector. */
3626 cand_vec.create (128);
3628 /* Allocate the mapping from statements to candidate indices. */
3629 stmt_cand_map = new hash_map<gimple, slsr_cand_t>;
3631 /* Create the obstack where candidate chains will reside. */
3632 gcc_obstack_init (&chain_obstack);
3634 /* Allocate the mapping from base expressions to candidate chains. */
3635 base_cand_map = new hash_table<cand_chain_hasher> (500);
3637 /* Allocate the mapping from bases to alternative bases. */
3638 alt_base_map = new hash_map<tree, tree>;
3640 /* Initialize the loop optimizer. We need to detect flow across
3641 back edges, and this gives us dominator information as well. */
3642 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
3644 /* Walk the CFG in predominator order looking for strength reduction
3645 candidates. */
3646 find_candidates_dom_walker (CDI_DOMINATORS)
3647 .walk (fun->cfg->x_entry_block_ptr);
3649 if (dump_file && (dump_flags & TDF_DETAILS))
3651 dump_cand_vec ();
3652 dump_cand_chains ();
3655 delete alt_base_map;
3656 free_affine_expand_cache (&name_expansions);
3658 /* Analyze costs and make appropriate replacements. */
3659 analyze_candidates_and_replace ();
3661 loop_optimizer_finalize ();
3662 delete base_cand_map;
3663 base_cand_map = NULL;
3664 obstack_free (&chain_obstack, NULL);
3665 delete stmt_cand_map;
3666 cand_vec.release ();
3667 obstack_free (&cand_obstack, NULL);
3669 return 0;
3672 } // anon namespace
3674 gimple_opt_pass *
3675 make_pass_strength_reduction (gcc::context *ctxt)
3677 return new pass_strength_reduction (ctxt);