gcc/gimple-ssa-strength-reduction.c
1 /* Straight-line strength reduction.
2 Copyright (C) 2012-2018 Free Software Foundation, Inc.
3 Contributed by Bill Schmidt, IBM <wschmidt@linux.ibm.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 /* There are many algorithms for performing strength reduction on
22 loops. This is not one of them. IVOPTS handles strength reduction
23 of induction variables just fine. This pass is intended to pick
24 up the crumbs it leaves behind, by considering opportunities for
25 strength reduction along dominator paths.
27 Strength reduction addresses explicit multiplies, and certain
28 multiplies implicit in addressing expressions. It would also be
29 possible to apply strength reduction to divisions and modulos,
30 but such opportunities are relatively uncommon.
32 Strength reduction is also currently restricted to integer operations.
33 If desired, it could be extended to floating-point operations under
34 control of something like -funsafe-math-optimizations. */
36 #include "config.h"
37 #include "system.h"
38 #include "coretypes.h"
39 #include "backend.h"
40 #include "rtl.h"
41 #include "tree.h"
42 #include "gimple.h"
43 #include "cfghooks.h"
44 #include "tree-pass.h"
45 #include "ssa.h"
46 #include "expmed.h"
47 #include "gimple-pretty-print.h"
48 #include "fold-const.h"
49 #include "gimple-iterator.h"
50 #include "gimplify-me.h"
51 #include "stor-layout.h"
52 #include "cfgloop.h"
53 #include "tree-cfg.h"
54 #include "domwalk.h"
55 #include "params.h"
56 #include "tree-ssa-address.h"
57 #include "tree-affine.h"
58 #include "builtins.h"
60 /* Information about a strength reduction candidate. Each statement
61 in the candidate table represents an expression of one of the
62 following forms (the special case of CAND_REF will be described
63 later):
65 (CAND_MULT) S1: X = (B + i) * S
66 (CAND_ADD) S1: X = B + (i * S)
68 Here X and B are SSA names, i is an integer constant, and S is
69 either an SSA name or a constant. We call B the "base," i the
70 "index", and S the "stride."
72 Any statement S0 that dominates S1 and is of the form:
74 (CAND_MULT) S0: Y = (B + i') * S
75 (CAND_ADD) S0: Y = B + (i' * S)
77 is called a "basis" for S1. In both cases, S1 may be replaced by
79 S1': X = Y + (i - i') * S,
81 where (i - i') * S is folded to the extent possible.
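   For example, given S0: Y = (B + 2) * S and S1: X = (B + 7) * S,
   S1 may be replaced by S1': X = Y + 5 * S, since (7 - 2) * S folds
   to 5 * S.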
83 All gimple statements are visited in dominator order, and each
84 statement that may contribute to one of the forms of S1 above is
85 given at least one entry in the candidate table. Such statements
86 include addition, pointer addition, subtraction, multiplication,
87 negation, copies, and nontrivial type casts. If a statement may
88 represent more than one expression of the forms of S1 above,
89 multiple "interpretations" are stored in the table and chained
90 together. Examples:
92 * An add of two SSA names may treat either operand as the base.
93 * A multiply of two SSA names, likewise.
94 * A copy or cast may be thought of as either a CAND_MULT with
95 i = 0 and S = 1, or as a CAND_ADD with i = 0 or S = 0.
97 Candidate records are allocated from an obstack. They are addressed
98 both from a hash table keyed on S1, and from a vector of candidate
99 pointers arranged in predominator order.
101 Opportunity note
102 ----------------
103 Currently we don't recognize:
105 S0: Y = (S * i') - B
106 S1: X = (S * i) - B
108 as a strength reduction opportunity, even though this S1 would
109 also be replaceable by the S1' above. This can be added if it
110 comes up in practice.
112 Strength reduction in addressing
113 --------------------------------
114 There is another kind of candidate known as CAND_REF. A CAND_REF
115 describes a statement containing a memory reference having
116 complex addressing that might benefit from strength reduction.
117 Specifically, we are interested in references for which
118 get_inner_reference returns a base address, offset, and bitpos as
119 follows:
121 base: MEM_REF (T1, C1)
122 offset: MULT_EXPR (PLUS_EXPR (T2, C2), C3)
123 bitpos: C4 * BITS_PER_UNIT
125 Here T1 and T2 are arbitrary trees, and C1, C2, C3, C4 are
126 arbitrary integer constants. Note that C2 may be zero, in which
127 case the offset will be MULT_EXPR (T2, C3).
129 When this pattern is recognized, the original memory reference
130 can be replaced with:
132 MEM_REF (POINTER_PLUS_EXPR (T1, MULT_EXPR (T2, C3)),
133 C1 + (C2 * C3) + C4)
135 which distributes the multiply to allow constant folding. When
136 two or more addressing expressions can be represented by MEM_REFs
137 of this form, differing only in the constants C1, C2, and C4,
138 making this substitution produces more efficient addressing during
139 the RTL phases. When there are not at least two expressions with
140 the same values of T1, T2, and C3, there is nothing to be gained
141 by the replacement.
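   As a concrete instance, suppose get_inner_reference returns
   base MEM_REF (T1, 4), offset MULT_EXPR (PLUS_EXPR (T2, 2), 8),
   and bitpos 0, so that C1 = 4, C2 = 2, C3 = 8, and C4 = 0.  The
   reference is then rewritten as
   MEM_REF (POINTER_PLUS_EXPR (T1, MULT_EXPR (T2, 8)), 20),
   since C1 + (C2 * C3) + C4 = 4 + 16 + 0 = 20.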
143 Strength reduction of CAND_REFs uses the same infrastructure as
144 that used by CAND_MULTs and CAND_ADDs. We record T1 in the base (B)
145 field, MULT_EXPR (T2, C3) in the stride (S) field, and
146 C1 + (C2 * C3) + C4 in the index (i) field. A basis for a CAND_REF
147 is thus another CAND_REF with the same B and S values. When at
148 least two CAND_REFs are chained together using the basis relation,
149 each of them is replaced as above, resulting in improved code
150 generation for addressing.
152 Conditional candidates
153 ======================
155 Conditional candidates are best illustrated with an example.
156 Consider the code sequence:
158 (1) x_0 = ...;
159 (2) a_0 = x_0 * 5; MULT (B: x_0; i: 0; S: 5)
160 if (...)
161 (3) x_1 = x_0 + 1; ADD (B: x_0, i: 1; S: 1)
162 (4) x_2 = PHI <x_0, x_1>; PHI (B: x_0, i: 0, S: 1)
163 (5) x_3 = x_2 + 1; ADD (B: x_2, i: 1, S: 1)
164 (6) a_1 = x_3 * 5; MULT (B: x_2, i: 1; S: 5)
166 Here strength reduction is complicated by the uncertain value of x_2.
167 A legitimate transformation is:
169 (1) x_0 = ...;
170 (2) a_0 = x_0 * 5;
171 if (...)
173 (3) [x_1 = x_0 + 1;]
174 (3a) t_1 = a_0 + 5;
176 (4) [x_2 = PHI <x_0, x_1>;]
177 (4a) t_2 = PHI <a_0, t_1>;
178 (5) [x_3 = x_2 + 1;]
179 (6r) a_1 = t_2 + 5;
181 where the bracketed instructions may go dead.
183 To recognize this opportunity, we have to observe that statement (6)
184 has a "hidden basis" (2). The hidden basis is unlike a normal basis
185 in that the statement and the hidden basis have different base SSA
186 names (x_2 and x_0, respectively). The relationship is established
187 when a statement's base name (x_2) is defined by a phi statement (4),
188 each argument of which (x_0, x_1) has an identical "derived base name."
189 If the argument is defined by a candidate (as x_1 is by (3)) that is a
190 CAND_ADD having a stride of 1, the derived base name of the argument is
191 the base name of the candidate (x_0). Otherwise, the argument itself
192 is its derived base name (as is the case with argument x_0).
194 The hidden basis for statement (6) is the nearest dominating candidate
195 whose base name is the derived base name (x_0) of the feeding phi (4),
196 and whose stride is identical to that of the statement. We can then
197 create the new "phi basis" (4a) and feeding adds along incoming arcs (3a),
198 allowing the final replacement of (6) by the strength-reduced (6r).
200 To facilitate this, a new kind of candidate (CAND_PHI) is introduced.
201 A CAND_PHI is not a candidate for replacement, but is maintained in the
202 candidate table to ease discovery of hidden bases. Any phi statement
203 whose arguments share a common derived base name is entered into the
204 table with the derived base name, an (arbitrary) index of zero, and a
205 stride of 1. A statement with a hidden basis can then be detected by
206 simply looking up its feeding phi definition in the candidate table,
207 extracting the derived base name, and searching for a basis in the
208 usual manner after substituting the derived base name.
210 Note that the transformation is only valid when the original phi and
211 the statements that define the phi's arguments are all at the same
212 position in the loop hierarchy. */
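/* As a simplified source-level sketch of the sort of code this pass
   improves (purely illustrative; the pass itself operates on GIMPLE):

     void
     fill (int *p, int n)
     {
       p[n] = 1;
       p[n + 4] = 2;
       p[n + 8] = 3;
     }

   All three stores scale n by sizeof (int), and their addresses
   differ only by constant byte offsets, so the second and third
   references can be based on the first.  */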
215 /* Index into the candidate vector, offset by 1. VECs are zero-based,
216 while cand_idx's are one-based, with zero indicating null. */
217 typedef unsigned cand_idx;
219 /* The kind of candidate. */
220 enum cand_kind
222 CAND_MULT,
223 CAND_ADD,
224 CAND_REF,
225 CAND_PHI
228 struct slsr_cand_d
230 /* The candidate statement S1. */
231 gimple *cand_stmt;
233 /* The base expression B: often an SSA name, but not always. */
234 tree base_expr;
236 /* The stride S. */
237 tree stride;
239 /* The index constant i. */
240 widest_int index;
242 /* The type of the candidate. This is normally the type of base_expr,
243 but casts may have occurred when combining feeding instructions.
244 A candidate can only be a basis for candidates of the same final type.
245 (For CAND_REFs, this is the type to be used for operand 1 of the
246 replacement MEM_REF.) */
247 tree cand_type;
249 /* The type to be used to interpret the stride field when the stride
250 is not a constant. Normally the same as the type of the recorded
251 stride, but when the stride has been cast we need to maintain that
252 knowledge in order to make legal substitutions without losing
253 precision. When the stride is a constant, this will be sizetype. */
254 tree stride_type;
256 /* The kind of candidate (CAND_MULT, etc.). */
257 enum cand_kind kind;
259 /* Index of this candidate in the candidate vector. */
260 cand_idx cand_num;
262 /* Index of the next candidate record for the same statement.
263 A statement may be useful in more than one way (e.g., due to
264 commutativity). So we can have multiple "interpretations"
265 of a statement. */
266 cand_idx next_interp;
268 /* Index of the basis statement S0, if any, in the candidate vector. */
269 cand_idx basis;
271 /* First candidate for which this candidate is a basis, if one exists. */
272 cand_idx dependent;
274 /* Next candidate having the same basis as this one. */
275 cand_idx sibling;
277 /* If this is a conditional candidate, the CAND_PHI candidate
278 that defines the base SSA name B. */
279 cand_idx def_phi;
281 /* Savings that can be expected from eliminating dead code if this
282 candidate is replaced. */
283 int dead_savings;
285 /* For PHI candidates, use a visited flag to keep from processing the
286 same PHI twice from multiple paths. */
287 int visited;
289 /* We sometimes have to cache a phi basis with a phi candidate to
290 avoid processing it twice. Valid only if visited==1. */
291 tree cached_basis;
294 typedef struct slsr_cand_d slsr_cand, *slsr_cand_t;
295 typedef const struct slsr_cand_d *const_slsr_cand_t;
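/* For example, a statement X = (B + 3) * S is recorded as a CAND_MULT
   with base_expr B, index 3, and stride S, while X = B + (3 * S) is
   recorded as a CAND_ADD with the same base, index, and stride.  */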
297 /* Pointers to candidates are chained together as part of a mapping
298 from base expressions to the candidates that use them. */
300 struct cand_chain_d
302 /* Base expression for the chain of candidates: often, but not
303 always, an SSA name. */
304 tree base_expr;
306 /* Pointer to a candidate. */
307 slsr_cand_t cand;
309 /* Chain pointer. */
310 struct cand_chain_d *next;
314 typedef struct cand_chain_d cand_chain, *cand_chain_t;
315 typedef const struct cand_chain_d *const_cand_chain_t;
317 /* Information about a unique "increment" associated with candidates
318 having an SSA name for a stride. An increment is the difference
319 between the index of the candidate and the index of its basis,
320 i.e., (i - i') as discussed in the module commentary.
322 When we are not going to generate address arithmetic we treat
323 increments that differ only in sign as the same, allowing sharing
324 of the cost of initializers. The absolute value of the increment
325 is stored in the incr_info. */
327 struct incr_info_d
329 /* The increment that relates a candidate to its basis. */
330 widest_int incr;
332 /* How many times the increment occurs in the candidate tree. */
333 unsigned count;
335 /* Cost of replacing candidates using this increment. Negative and
336 zero costs indicate replacement should be performed. */
337 int cost;
339 /* If this increment is profitable but is not -1, 0, or 1, it requires
340 an initializer T_0 = stride * incr to be found or introduced in the
341 nearest common dominator of all candidates. This field holds T_0
342 for subsequent use. */
343 tree initializer;
345 /* If the initializer was found to already exist, this is the block
346 where it was found. */
347 basic_block init_bb;
350 typedef struct incr_info_d incr_info, *incr_info_t;
352 /* Candidates are maintained in a vector. If candidate X dominates
353 candidate Y, then X appears before Y in the vector; but the
354 converse does not necessarily hold. */
355 static vec<slsr_cand_t> cand_vec;
357 enum cost_consts
359 COST_NEUTRAL = 0,
360 COST_INFINITE = 1000
363 enum stride_status
365 UNKNOWN_STRIDE = 0,
366 KNOWN_STRIDE = 1
369 enum phi_adjust_status
371 NOT_PHI_ADJUST = 0,
372 PHI_ADJUST = 1
375 enum count_phis_status
377 DONT_COUNT_PHIS = 0,
378 COUNT_PHIS = 1
381 /* Constrain how many PHI nodes we will visit for a conditional
382 candidate (depth and breadth). */
383 const int MAX_SPREAD = 16;
385 /* Pointer map embodying a mapping from statements to candidates. */
386 static hash_map<gimple *, slsr_cand_t> *stmt_cand_map;
388 /* Obstack for candidates. */
389 static struct obstack cand_obstack;
391 /* Obstack for candidate chains. */
392 static struct obstack chain_obstack;
394 /* An array INCR_VEC of incr_infos is used during analysis of related
395 candidates having an SSA name for a stride. INCR_VEC_LEN describes
396 its current length. MAX_INCR_VEC_LEN is used to avoid costly
397 pathological cases. */
398 static incr_info_t incr_vec;
399 static unsigned incr_vec_len;
400 const int MAX_INCR_VEC_LEN = 16;
402 /* For a chain of candidates with unknown stride, indicates whether or not
403 we must generate pointer arithmetic when replacing statements. */
404 static bool address_arithmetic_p;
406 /* Forward function declarations. */
407 static slsr_cand_t base_cand_from_table (tree);
408 static tree introduce_cast_before_cand (slsr_cand_t, tree, tree);
409 static bool legal_cast_p_1 (tree, tree);
411 /* Produce a pointer to the IDX'th candidate in the candidate vector. */
413 static slsr_cand_t
414 lookup_cand (cand_idx idx)
416 return cand_vec[idx - 1];
419 /* Helper for hashing a candidate chain header. */
421 struct cand_chain_hasher : nofree_ptr_hash <cand_chain>
423 static inline hashval_t hash (const cand_chain *);
424 static inline bool equal (const cand_chain *, const cand_chain *);
427 inline hashval_t
428 cand_chain_hasher::hash (const cand_chain *p)
430 tree base_expr = p->base_expr;
431 return iterative_hash_expr (base_expr, 0);
434 inline bool
435 cand_chain_hasher::equal (const cand_chain *chain1, const cand_chain *chain2)
437 return operand_equal_p (chain1->base_expr, chain2->base_expr, 0);
440 /* Hash table embodying a mapping from base exprs to chains of candidates. */
441 static hash_table<cand_chain_hasher> *base_cand_map;
443 /* Pointer map used by tree_to_aff_combination_expand. */
444 static hash_map<tree, name_expansion *> *name_expansions;
445 /* Pointer map embodying a mapping from bases to alternative bases. */
446 static hash_map<tree, tree> *alt_base_map;
448 /* Given BASE, use the tree affine combination facilities to
449 find the underlying tree expression for BASE, with any
450 immediate offset excluded.
452 N.B. we should eliminate this backtracking with better forward
453 analysis in a future release. */
455 static tree
456 get_alternative_base (tree base)
458 tree *result = alt_base_map->get (base);
460 if (result == NULL)
462 tree expr;
463 aff_tree aff;
465 tree_to_aff_combination_expand (base, TREE_TYPE (base),
466 &aff, &name_expansions);
467 aff.offset = 0;
468 expr = aff_combination_to_tree (&aff);
470 gcc_assert (!alt_base_map->put (base, base == expr ? NULL : expr));
472 return expr == base ? NULL : expr;
475 return *result;
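/* For example, if BASE is an SSA name whose expanded affine form is
   T + 32, the alternative base computed for it is T; bases that
   differ only in such an immediate offset therefore map to the same
   alternative base.  */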
478 /* Look in the candidate table for a CAND_PHI that defines BASE and
479 return it if found; otherwise return NULL. */
481 static cand_idx
482 find_phi_def (tree base)
484 slsr_cand_t c;
486 if (TREE_CODE (base) != SSA_NAME)
487 return 0;
489 c = base_cand_from_table (base);
491 if (!c || c->kind != CAND_PHI
492 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_phi_result (c->cand_stmt)))
493 return 0;
495 return c->cand_num;
498 /* Determine whether all uses of NAME are directly or indirectly
499 used by STMT.  That is, we want to know whether, if STMT goes
500 dead, the definition of NAME also goes dead. */
501 static bool
502 uses_consumed_by_stmt (tree name, gimple *stmt, unsigned recurse = 0)
504 gimple *use_stmt;
505 imm_use_iterator iter;
506 bool retval = true;
508 FOR_EACH_IMM_USE_STMT (use_stmt, iter, name)
510 if (use_stmt == stmt || is_gimple_debug (use_stmt))
511 continue;
513 if (!is_gimple_assign (use_stmt)
514 || !gimple_get_lhs (use_stmt)
515 || !is_gimple_reg (gimple_get_lhs (use_stmt))
516 || recurse >= 10
517 || !uses_consumed_by_stmt (gimple_get_lhs (use_stmt), stmt,
518 recurse + 1))
520 retval = false;
521 BREAK_FROM_IMM_USE_STMT (iter);
525 return retval;
528 /* Helper routine for find_basis_for_candidate. May be called twice:
529 once for the candidate's base expr, and optionally again either for
530 the candidate's phi definition or for a CAND_REF's alternative base
531 expression. */
533 static slsr_cand_t
534 find_basis_for_base_expr (slsr_cand_t c, tree base_expr)
536 cand_chain mapping_key;
537 cand_chain_t chain;
538 slsr_cand_t basis = NULL;
540 // Limit potential of N^2 behavior for long candidate chains.
541 int iters = 0;
542 int max_iters = PARAM_VALUE (PARAM_MAX_SLSR_CANDIDATE_SCAN);
544 mapping_key.base_expr = base_expr;
545 chain = base_cand_map->find (&mapping_key);
547 for (; chain && iters < max_iters; chain = chain->next, ++iters)
549 slsr_cand_t one_basis = chain->cand;
551 if (one_basis->kind != c->kind
552 || one_basis->cand_stmt == c->cand_stmt
553 || !operand_equal_p (one_basis->stride, c->stride, 0)
554 || !types_compatible_p (one_basis->cand_type, c->cand_type)
555 || !types_compatible_p (one_basis->stride_type, c->stride_type)
556 || !dominated_by_p (CDI_DOMINATORS,
557 gimple_bb (c->cand_stmt),
558 gimple_bb (one_basis->cand_stmt)))
559 continue;
561 tree lhs = gimple_assign_lhs (one_basis->cand_stmt);
562 if (lhs && TREE_CODE (lhs) == SSA_NAME
563 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
564 continue;
566 if (!basis || basis->cand_num < one_basis->cand_num)
567 basis = one_basis;
570 return basis;
573 /* Use the base expr from candidate C to look for possible candidates
574 that can serve as a basis for C. Each potential basis must also
575 appear in a block that dominates the candidate statement and have
576 the same stride and type. If more than one possible basis exists,
577 the one with highest index in the vector is chosen; this will be
578 the most immediately dominating basis. */
580 static int
581 find_basis_for_candidate (slsr_cand_t c)
583 slsr_cand_t basis = find_basis_for_base_expr (c, c->base_expr);
585 /* If a candidate doesn't have a basis using its base expression,
586 it may have a basis hidden by one or more intervening phis. */
587 if (!basis && c->def_phi)
589 basic_block basis_bb, phi_bb;
590 slsr_cand_t phi_cand = lookup_cand (c->def_phi);
591 basis = find_basis_for_base_expr (c, phi_cand->base_expr);
593 if (basis)
595 /* A hidden basis must dominate the phi-definition of the
596 candidate's base name. */
597 phi_bb = gimple_bb (phi_cand->cand_stmt);
598 basis_bb = gimple_bb (basis->cand_stmt);
600 if (phi_bb == basis_bb
601 || !dominated_by_p (CDI_DOMINATORS, phi_bb, basis_bb))
603 basis = NULL;
604 c->basis = 0;
607 /* If we found a hidden basis, estimate additional dead-code
608 savings if the phi and its feeding statements can be removed. */
609 tree feeding_var = gimple_phi_result (phi_cand->cand_stmt);
610 if (basis && uses_consumed_by_stmt (feeding_var, c->cand_stmt))
611 c->dead_savings += phi_cand->dead_savings;
615 if (flag_expensive_optimizations && !basis && c->kind == CAND_REF)
617 tree alt_base_expr = get_alternative_base (c->base_expr);
618 if (alt_base_expr)
619 basis = find_basis_for_base_expr (c, alt_base_expr);
622 if (basis)
624 c->sibling = basis->dependent;
625 basis->dependent = c->cand_num;
626 return basis->cand_num;
629 return 0;
632 /* Record a mapping from BASE to C, indicating that C may potentially serve
633 as a basis using that base expression. BASE may be the same as
634 C->BASE_EXPR; alternatively BASE can be a different tree that shares the
635 underlying expression of C->BASE_EXPR. */
637 static void
638 record_potential_basis (slsr_cand_t c, tree base)
640 cand_chain_t node;
641 cand_chain **slot;
643 gcc_assert (base);
645 node = (cand_chain_t) obstack_alloc (&chain_obstack, sizeof (cand_chain));
646 node->base_expr = base;
647 node->cand = c;
648 node->next = NULL;
649 slot = base_cand_map->find_slot (node, INSERT);
651 if (*slot)
653 cand_chain_t head = (cand_chain_t) (*slot);
654 node->next = head->next;
655 head->next = node;
657 else
658 *slot = node;
661 /* Allocate storage for a new candidate and initialize its fields.
662 Attempt to find a basis for the candidate.
664 For CAND_REF, an alternative base may also be recorded and used
665 to find a basis. This helps cases where the expression hidden
666 behind BASE (which is usually an SSA_NAME) has immediate offset,
667 e.g.
669 a2[i][j] = 1;
670 a2[i + 20][j] = 2; */
672 static slsr_cand_t
673 alloc_cand_and_find_basis (enum cand_kind kind, gimple *gs, tree base,
674 const widest_int &index, tree stride, tree ctype,
675 tree stype, unsigned savings)
677 slsr_cand_t c = (slsr_cand_t) obstack_alloc (&cand_obstack,
678 sizeof (slsr_cand));
679 c->cand_stmt = gs;
680 c->base_expr = base;
681 c->stride = stride;
682 c->index = index;
683 c->cand_type = ctype;
684 c->stride_type = stype;
685 c->kind = kind;
686 c->cand_num = cand_vec.length () + 1;
687 c->next_interp = 0;
688 c->dependent = 0;
689 c->sibling = 0;
690 c->def_phi = kind == CAND_MULT ? find_phi_def (base) : 0;
691 c->dead_savings = savings;
692 c->visited = 0;
693 c->cached_basis = NULL_TREE;
695 cand_vec.safe_push (c);
697 if (kind == CAND_PHI)
698 c->basis = 0;
699 else
700 c->basis = find_basis_for_candidate (c);
702 record_potential_basis (c, base);
703 if (flag_expensive_optimizations && kind == CAND_REF)
705 tree alt_base = get_alternative_base (base);
706 if (alt_base)
707 record_potential_basis (c, alt_base);
710 return c;
713 /* Determine the target cost of statement GS when compiling according
714 to SPEED. */
716 static int
717 stmt_cost (gimple *gs, bool speed)
719 tree lhs, rhs1, rhs2;
720 machine_mode lhs_mode;
722 gcc_assert (is_gimple_assign (gs));
723 lhs = gimple_assign_lhs (gs);
724 rhs1 = gimple_assign_rhs1 (gs);
725 lhs_mode = TYPE_MODE (TREE_TYPE (lhs));
727 switch (gimple_assign_rhs_code (gs))
729 case MULT_EXPR:
730 rhs2 = gimple_assign_rhs2 (gs);
732 if (tree_fits_shwi_p (rhs2))
733 return mult_by_coeff_cost (tree_to_shwi (rhs2), lhs_mode, speed);
735 gcc_assert (TREE_CODE (rhs1) != INTEGER_CST);
736 return mul_cost (speed, lhs_mode);
738 case PLUS_EXPR:
739 case POINTER_PLUS_EXPR:
740 case MINUS_EXPR:
741 return add_cost (speed, lhs_mode);
743 case NEGATE_EXPR:
744 return neg_cost (speed, lhs_mode);
746 CASE_CONVERT:
747 return convert_cost (lhs_mode, TYPE_MODE (TREE_TYPE (rhs1)), speed);
749 /* Note that we don't assign costs to copies that in most cases
750 will go away. */
751 case SSA_NAME:
752 return 0;
754 default:
758 gcc_unreachable ();
759 return 0;
762 /* Look up the defining statement for BASE_IN and return a pointer
763 to its candidate in the candidate table, if any; otherwise NULL.
764 Only CAND_ADD and CAND_MULT candidates are returned. */
766 static slsr_cand_t
767 base_cand_from_table (tree base_in)
769 slsr_cand_t *result;
771 gimple *def = SSA_NAME_DEF_STMT (base_in);
772 if (!def)
773 return (slsr_cand_t) NULL;
775 result = stmt_cand_map->get (def);
777 if (result && (*result)->kind != CAND_REF)
778 return *result;
780 return (slsr_cand_t) NULL;
783 /* Add an entry to the statement-to-candidate mapping. */
785 static void
786 add_cand_for_stmt (gimple *gs, slsr_cand_t c)
788 gcc_assert (!stmt_cand_map->put (gs, c));
791 /* Given PHI which contains a phi statement, determine whether it
792 satisfies all the requirements of a phi candidate. If so, create
793 a candidate. Note that a CAND_PHI never has a basis itself, but
794 is used to help find a basis for subsequent candidates. */
796 static void
797 slsr_process_phi (gphi *phi, bool speed)
799 unsigned i;
800 tree arg0_base = NULL_TREE, base_type;
801 slsr_cand_t c;
802 struct loop *cand_loop = gimple_bb (phi)->loop_father;
803 unsigned savings = 0;
805 /* A CAND_PHI requires each of its arguments to have the same
806 derived base name. (See the module header commentary for a
807 definition of derived base names.) Furthermore, all feeding
808 definitions must be in the same position in the loop hierarchy
809 as PHI. */
811 for (i = 0; i < gimple_phi_num_args (phi); i++)
813 slsr_cand_t arg_cand;
814 tree arg = gimple_phi_arg_def (phi, i);
815 tree derived_base_name = NULL_TREE;
816 gimple *arg_stmt = NULL;
817 basic_block arg_bb = NULL;
819 if (TREE_CODE (arg) != SSA_NAME)
820 return;
822 arg_cand = base_cand_from_table (arg);
824 if (arg_cand)
826 while (arg_cand->kind != CAND_ADD && arg_cand->kind != CAND_PHI)
828 if (!arg_cand->next_interp)
829 return;
831 arg_cand = lookup_cand (arg_cand->next_interp);
834 if (!integer_onep (arg_cand->stride))
835 return;
837 derived_base_name = arg_cand->base_expr;
838 arg_stmt = arg_cand->cand_stmt;
839 arg_bb = gimple_bb (arg_stmt);
841 /* Gather potential dead code savings if the phi statement
842 can be removed later on. */
843 if (uses_consumed_by_stmt (arg, phi))
845 if (gimple_code (arg_stmt) == GIMPLE_PHI)
846 savings += arg_cand->dead_savings;
847 else
848 savings += stmt_cost (arg_stmt, speed);
851 else if (SSA_NAME_IS_DEFAULT_DEF (arg))
853 derived_base_name = arg;
854 arg_bb = single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun));
857 if (!arg_bb || arg_bb->loop_father != cand_loop)
858 return;
860 if (i == 0)
861 arg0_base = derived_base_name;
862 else if (!operand_equal_p (derived_base_name, arg0_base, 0))
863 return;
866 /* Create the candidate. "alloc_cand_and_find_basis" is named
867 misleadingly for this case, as no basis will be sought for a
868 CAND_PHI. */
869 base_type = TREE_TYPE (arg0_base);
871 c = alloc_cand_and_find_basis (CAND_PHI, phi, arg0_base,
872 0, integer_one_node, base_type,
873 sizetype, savings);
875 /* Add the candidate to the statement-candidate mapping. */
876 add_cand_for_stmt (phi, c);
879 /* Given PBASE which is a pointer to tree, look up the defining
880 statement for it and check whether the candidate is in the
881 form of:
883 X = B + (1 * S), S is integer constant
884 X = B + (i * S), S is integer one
886 If so, set PBASE to the candidate's base_expr and return
887 (i * S) as a widest_int.
888 Otherwise, just return zero. */
890 static widest_int
891 backtrace_base_for_ref (tree *pbase)
893 tree base_in = *pbase;
894 slsr_cand_t base_cand;
896 STRIP_NOPS (base_in);
898 /* Strip off widening conversion(s) to handle cases where
899 e.g. 'B' is widened from an 'int' in order to calculate
900 a 64-bit address. */
901 if (CONVERT_EXPR_P (base_in)
902 && legal_cast_p_1 (TREE_TYPE (base_in),
903 TREE_TYPE (TREE_OPERAND (base_in, 0))))
904 base_in = get_unwidened (base_in, NULL_TREE);
906 if (TREE_CODE (base_in) != SSA_NAME)
907 return 0;
909 base_cand = base_cand_from_table (base_in);
911 while (base_cand && base_cand->kind != CAND_PHI)
913 if (base_cand->kind == CAND_ADD
914 && base_cand->index == 1
915 && TREE_CODE (base_cand->stride) == INTEGER_CST)
917 /* X = B + (1 * S), S is integer constant. */
918 *pbase = base_cand->base_expr;
919 return wi::to_widest (base_cand->stride);
921 else if (base_cand->kind == CAND_ADD
922 && TREE_CODE (base_cand->stride) == INTEGER_CST
923 && integer_onep (base_cand->stride))
925 /* X = B + (i * S), S is integer one. */
926 *pbase = base_cand->base_expr;
927 return base_cand->index;
930 if (base_cand->next_interp)
931 base_cand = lookup_cand (base_cand->next_interp);
932 else
933 base_cand = NULL;
936 return 0;
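/* For example, if *PBASE above is an SSA name X defined by
   X = B + 4 (recorded as a CAND_ADD with base B, index 4, and
   stride 1), backtrace_base_for_ref sets *PBASE to B and returns 4;
   the caller then folds that value into the constant index of the
   restructured reference.  */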
939 /* Look for the following pattern:
941 *PBASE: MEM_REF (T1, C1)
943 *POFFSET: MULT_EXPR (T2, C3) [C2 is zero]
945 MULT_EXPR (PLUS_EXPR (T2, C2), C3)
947 MULT_EXPR (MINUS_EXPR (T2, -C2), C3)
949 *PINDEX: C4 * BITS_PER_UNIT
951 If not present, leave the input values unchanged and return FALSE.
952 Otherwise, modify the input values as follows and return TRUE:
954 *PBASE: T1
955 *POFFSET: MULT_EXPR (T2, C3)
956 *PINDEX: C1 + (C2 * C3) + C4
958 When T2 is recorded by a CAND_ADD in the form of (T2' + C5), it
959 will be further restructured to:
961 *PBASE: T1
962 *POFFSET: MULT_EXPR (T2', C3)
963 *PINDEX: C1 + (C2 * C3) + C4 + (C5 * C3) */
965 static bool
966 restructure_reference (tree *pbase, tree *poffset, widest_int *pindex,
967 tree *ptype)
969 tree base = *pbase, offset = *poffset;
970 widest_int index = *pindex;
971 tree mult_op0, t1, t2, type;
972 widest_int c1, c2, c3, c4, c5;
973 offset_int mem_offset;
975 if (!base
976 || !offset
977 || TREE_CODE (base) != MEM_REF
978 || !mem_ref_offset (base).is_constant (&mem_offset)
979 || TREE_CODE (offset) != MULT_EXPR
980 || TREE_CODE (TREE_OPERAND (offset, 1)) != INTEGER_CST
981 || wi::umod_floor (index, BITS_PER_UNIT) != 0)
982 return false;
984 t1 = TREE_OPERAND (base, 0);
985 c1 = widest_int::from (mem_offset, SIGNED);
986 type = TREE_TYPE (TREE_OPERAND (base, 1));
988 mult_op0 = TREE_OPERAND (offset, 0);
989 c3 = wi::to_widest (TREE_OPERAND (offset, 1));
991 if (TREE_CODE (mult_op0) == PLUS_EXPR)
993 if (TREE_CODE (TREE_OPERAND (mult_op0, 1)) == INTEGER_CST)
995 t2 = TREE_OPERAND (mult_op0, 0);
996 c2 = wi::to_widest (TREE_OPERAND (mult_op0, 1));
998 else
999 return false;
1001 else if (TREE_CODE (mult_op0) == MINUS_EXPR)
1003 if (TREE_CODE (TREE_OPERAND (mult_op0, 1)) == INTEGER_CST)
1005 t2 = TREE_OPERAND (mult_op0, 0);
1006 c2 = -wi::to_widest (TREE_OPERAND (mult_op0, 1));
1008 else
1009 return false;
1011 else
1013 t2 = mult_op0;
1014 c2 = 0;
1017 c4 = index >> LOG2_BITS_PER_UNIT;
1018 c5 = backtrace_base_for_ref (&t2);
1020 *pbase = t1;
1021 *poffset = fold_build2 (MULT_EXPR, sizetype, fold_convert (sizetype, t2),
1022 wide_int_to_tree (sizetype, c3));
1023 *pindex = c1 + c2 * c3 + c4 + c5 * c3;
1024 *ptype = type;
1026 return true;
1029 /* Given GS which contains a data reference, create a CAND_REF entry in
1030 the candidate table and attempt to find a basis. */
1032 static void
1033 slsr_process_ref (gimple *gs)
1035 tree ref_expr, base, offset, type;
1036 poly_int64 bitsize, bitpos;
1037 machine_mode mode;
1038 int unsignedp, reversep, volatilep;
1039 slsr_cand_t c;
1041 if (gimple_vdef (gs))
1042 ref_expr = gimple_assign_lhs (gs);
1043 else
1044 ref_expr = gimple_assign_rhs1 (gs);
1046 if (!handled_component_p (ref_expr)
1047 || TREE_CODE (ref_expr) == BIT_FIELD_REF
1048 || (TREE_CODE (ref_expr) == COMPONENT_REF
1049 && DECL_BIT_FIELD (TREE_OPERAND (ref_expr, 1))))
1050 return;
1052 base = get_inner_reference (ref_expr, &bitsize, &bitpos, &offset, &mode,
1053 &unsignedp, &reversep, &volatilep);
1054 HOST_WIDE_INT cbitpos;
1055 if (reversep || !bitpos.is_constant (&cbitpos))
1056 return;
1057 widest_int index = cbitpos;
1059 if (!restructure_reference (&base, &offset, &index, &type))
1060 return;
1062 c = alloc_cand_and_find_basis (CAND_REF, gs, base, index, offset,
1063 type, sizetype, 0);
1065 /* Add the candidate to the statement-candidate mapping. */
1066 add_cand_for_stmt (gs, c);
1069 /* Create a candidate entry for a statement GS, where GS multiplies
1070 two SSA names BASE_IN and STRIDE_IN. Propagate any known information
1071 about the two SSA names into the new candidate. Return the new
1072 candidate. */
1074 static slsr_cand_t
1075 create_mul_ssa_cand (gimple *gs, tree base_in, tree stride_in, bool speed)
1077 tree base = NULL_TREE, stride = NULL_TREE, ctype = NULL_TREE;
1078 tree stype = NULL_TREE;
1079 widest_int index;
1080 unsigned savings = 0;
1081 slsr_cand_t c;
1082 slsr_cand_t base_cand = base_cand_from_table (base_in);
1084 /* Look at all interpretations of the base candidate, if necessary,
1085 to find information to propagate into this candidate. */
1086 while (base_cand && !base && base_cand->kind != CAND_PHI)
1089 if (base_cand->kind == CAND_MULT && integer_onep (base_cand->stride))
1091 /* Y = (B + i') * 1
1092 X = Y * Z
1093 ================
1094 X = (B + i') * Z */
1095 base = base_cand->base_expr;
1096 index = base_cand->index;
1097 stride = stride_in;
1098 ctype = base_cand->cand_type;
1099 stype = TREE_TYPE (stride_in);
1100 if (has_single_use (base_in))
1101 savings = (base_cand->dead_savings
1102 + stmt_cost (base_cand->cand_stmt, speed));
1104 else if (base_cand->kind == CAND_ADD
1105 && TREE_CODE (base_cand->stride) == INTEGER_CST)
1107 /* Y = B + (i' * S), S constant
1108 X = Y * Z
1109 ============================
1110 X = B + ((i' * S) * Z) */
1111 base = base_cand->base_expr;
1112 index = base_cand->index * wi::to_widest (base_cand->stride);
1113 stride = stride_in;
1114 ctype = base_cand->cand_type;
1115 stype = TREE_TYPE (stride_in);
1116 if (has_single_use (base_in))
1117 savings = (base_cand->dead_savings
1118 + stmt_cost (base_cand->cand_stmt, speed));
1121 if (base_cand->next_interp)
1122 base_cand = lookup_cand (base_cand->next_interp);
1123 else
1124 base_cand = NULL;
1127 if (!base)
1129 /* No interpretations had anything useful to propagate, so
1130 produce X = (Y + 0) * Z. */
1131 base = base_in;
1132 index = 0;
1133 stride = stride_in;
1134 ctype = TREE_TYPE (base_in);
1135 stype = TREE_TYPE (stride_in);
1138 c = alloc_cand_and_find_basis (CAND_MULT, gs, base, index, stride,
1139 ctype, stype, savings);
1140 return c;
1143 /* Create a candidate entry for a statement GS, where GS multiplies
1144 SSA name BASE_IN by constant STRIDE_IN. Propagate any known
1145 information about BASE_IN into the new candidate. Return the new
1146 candidate. */
1148 static slsr_cand_t
1149 create_mul_imm_cand (gimple *gs, tree base_in, tree stride_in, bool speed)
1151 tree base = NULL_TREE, stride = NULL_TREE, ctype = NULL_TREE;
1152 widest_int index, temp;
1153 unsigned savings = 0;
1154 slsr_cand_t c;
1155 slsr_cand_t base_cand = base_cand_from_table (base_in);
1157 /* Look at all interpretations of the base candidate, if necessary,
1158 to find information to propagate into this candidate. */
1159 while (base_cand && !base && base_cand->kind != CAND_PHI)
1161 if (base_cand->kind == CAND_MULT
1162 && TREE_CODE (base_cand->stride) == INTEGER_CST)
1164 /* Y = (B + i') * S, S constant
1165 X = Y * c
1166 ============================
1167 X = (B + i') * (S * c) */
1168 temp = wi::to_widest (base_cand->stride) * wi::to_widest (stride_in);
1169 if (wi::fits_to_tree_p (temp, TREE_TYPE (stride_in)))
1171 base = base_cand->base_expr;
1172 index = base_cand->index;
1173 stride = wide_int_to_tree (TREE_TYPE (stride_in), temp);
1174 ctype = base_cand->cand_type;
1175 if (has_single_use (base_in))
1176 savings = (base_cand->dead_savings
1177 + stmt_cost (base_cand->cand_stmt, speed));
1180 else if (base_cand->kind == CAND_ADD && integer_onep (base_cand->stride))
1182 /* Y = B + (i' * 1)
1183 X = Y * c
1184 ===========================
1185 X = (B + i') * c */
1186 base = base_cand->base_expr;
1187 index = base_cand->index;
1188 stride = stride_in;
1189 ctype = base_cand->cand_type;
1190 if (has_single_use (base_in))
1191 savings = (base_cand->dead_savings
1192 + stmt_cost (base_cand->cand_stmt, speed));
1194 else if (base_cand->kind == CAND_ADD
1195 && base_cand->index == 1
1196 && TREE_CODE (base_cand->stride) == INTEGER_CST)
1198 /* Y = B + (1 * S), S constant
1199 X = Y * c
1200 ===========================
1201 X = (B + S) * c */
1202 base = base_cand->base_expr;
1203 index = wi::to_widest (base_cand->stride);
1204 stride = stride_in;
1205 ctype = base_cand->cand_type;
1206 if (has_single_use (base_in))
1207 savings = (base_cand->dead_savings
1208 + stmt_cost (base_cand->cand_stmt, speed));
1211 if (base_cand->next_interp)
1212 base_cand = lookup_cand (base_cand->next_interp);
1213 else
1214 base_cand = NULL;
1217 if (!base)
1219 /* No interpretations had anything useful to propagate, so
1220 produce X = (Y + 0) * c. */
1221 base = base_in;
1222 index = 0;
1223 stride = stride_in;
1224 ctype = TREE_TYPE (base_in);
1227 c = alloc_cand_and_find_basis (CAND_MULT, gs, base, index, stride,
1228 ctype, sizetype, savings);
1229 return c;
1232 /* Given GS which is a multiply of scalar integers, make an appropriate
1233 entry in the candidate table. If this is a multiply of two SSA names,
1234 create two CAND_MULT interpretations and attempt to find a basis for
1235 each of them. Otherwise, create a single CAND_MULT and attempt to
1236 find a basis. */
1238 static void
1239 slsr_process_mul (gimple *gs, tree rhs1, tree rhs2, bool speed)
1241 slsr_cand_t c, c2;
1243 /* If this is a multiply of an SSA name with itself, it is highly
1244 unlikely that we will get a strength reduction opportunity, so
1245 don't record it as a candidate. This simplifies the logic for
1246 finding a basis, so if this is removed that must be considered. */
1247 if (rhs1 == rhs2)
1248 return;
1250 if (TREE_CODE (rhs2) == SSA_NAME)
1252 /* Record an interpretation of this statement in the candidate table
1253 assuming RHS1 is the base expression and RHS2 is the stride. */
1254 c = create_mul_ssa_cand (gs, rhs1, rhs2, speed);
1256 /* Add the first interpretation to the statement-candidate mapping. */
1257 add_cand_for_stmt (gs, c);
1259 /* Record another interpretation of this statement assuming RHS1
1260 is the stride and RHS2 is the base expression. */
1261 c2 = create_mul_ssa_cand (gs, rhs2, rhs1, speed);
1262 c->next_interp = c2->cand_num;
1264 else if (TREE_CODE (rhs2) == INTEGER_CST)
1266 /* Record an interpretation for the multiply-immediate. */
1267 c = create_mul_imm_cand (gs, rhs1, rhs2, speed);
1269 /* Add the interpretation to the statement-candidate mapping. */
1270 add_cand_for_stmt (gs, c);
1274 /* Create a candidate entry for a statement GS, where GS adds two
1275 SSA names BASE_IN and ADDEND_IN if SUBTRACT_P is false, and
1276 subtracts ADDEND_IN from BASE_IN otherwise. Propagate any known
1277 information about the two SSA names into the new candidate.
1278 Return the new candidate. */
1280 static slsr_cand_t
1281 create_add_ssa_cand (gimple *gs, tree base_in, tree addend_in,
1282 bool subtract_p, bool speed)
1284 tree base = NULL_TREE, stride = NULL_TREE, ctype = NULL_TREE;
1285 tree stype = NULL_TREE;
1286 widest_int index;
1287 unsigned savings = 0;
1288 slsr_cand_t c;
1289 slsr_cand_t base_cand = base_cand_from_table (base_in);
1290 slsr_cand_t addend_cand = base_cand_from_table (addend_in);
1292 /* The most useful transformation is a multiply-immediate feeding
1293 an add or subtract. Look for that first. */
1294 while (addend_cand && !base && addend_cand->kind != CAND_PHI)
1296 if (addend_cand->kind == CAND_MULT
1297 && addend_cand->index == 0
1298 && TREE_CODE (addend_cand->stride) == INTEGER_CST)
1300 /* Z = (B + 0) * S, S constant
1301 X = Y +/- Z
1302 ===========================
1303 X = Y + ((+/-1 * S) * B) */
1304 base = base_in;
1305 index = wi::to_widest (addend_cand->stride);
1306 if (subtract_p)
1307 index = -index;
1308 stride = addend_cand->base_expr;
1309 ctype = TREE_TYPE (base_in);
1310 stype = addend_cand->cand_type;
1311 if (has_single_use (addend_in))
1312 savings = (addend_cand->dead_savings
1313 + stmt_cost (addend_cand->cand_stmt, speed));
1316 if (addend_cand->next_interp)
1317 addend_cand = lookup_cand (addend_cand->next_interp);
1318 else
1319 addend_cand = NULL;
1322 while (base_cand && !base && base_cand->kind != CAND_PHI)
1324 if (base_cand->kind == CAND_ADD
1325 && (base_cand->index == 0
1326 || operand_equal_p (base_cand->stride,
1327 integer_zero_node, 0)))
1329 /* Y = B + (i' * S), i' * S = 0
1330 X = Y +/- Z
1331 ============================
1332 X = B + (+/-1 * Z) */
1333 base = base_cand->base_expr;
1334 index = subtract_p ? -1 : 1;
1335 stride = addend_in;
1336 ctype = base_cand->cand_type;
1337 stype = (TREE_CODE (addend_in) == INTEGER_CST ? sizetype
1338 : TREE_TYPE (addend_in));
1339 if (has_single_use (base_in))
1340 savings = (base_cand->dead_savings
1341 + stmt_cost (base_cand->cand_stmt, speed));
1343 else if (subtract_p)
1345 slsr_cand_t subtrahend_cand = base_cand_from_table (addend_in);
1347 while (subtrahend_cand && !base && subtrahend_cand->kind != CAND_PHI)
1349 if (subtrahend_cand->kind == CAND_MULT
1350 && subtrahend_cand->index == 0
1351 && TREE_CODE (subtrahend_cand->stride) == INTEGER_CST)
1353 /* Z = (B + 0) * S, S constant
1354 X = Y - Z
1355 ===========================
1356 Value: X = Y + ((-1 * S) * B) */
1357 base = base_in;
1358 index = wi::to_widest (subtrahend_cand->stride);
1359 index = -index;
1360 stride = subtrahend_cand->base_expr;
1361 ctype = TREE_TYPE (base_in);
1362 stype = subtrahend_cand->cand_type;
1363 if (has_single_use (addend_in))
1364 savings = (subtrahend_cand->dead_savings
1365 + stmt_cost (subtrahend_cand->cand_stmt, speed));
1368 if (subtrahend_cand->next_interp)
1369 subtrahend_cand = lookup_cand (subtrahend_cand->next_interp);
1370 else
1371 subtrahend_cand = NULL;
1375 if (base_cand->next_interp)
1376 base_cand = lookup_cand (base_cand->next_interp);
1377 else
1378 base_cand = NULL;
1381 if (!base)
1383 /* No interpretations had anything useful to propagate, so
1384 produce X = Y + (1 * Z). */
1385 base = base_in;
1386 index = subtract_p ? -1 : 1;
1387 stride = addend_in;
1388 ctype = TREE_TYPE (base_in);
1389 stype = (TREE_CODE (addend_in) == INTEGER_CST ? sizetype
1390 : TREE_TYPE (addend_in));
1393 c = alloc_cand_and_find_basis (CAND_ADD, gs, base, index, stride,
1394 ctype, stype, savings);
1395 return c;
1398 /* Create a candidate entry for a statement GS, where GS adds SSA
1399 name BASE_IN to constant INDEX_IN. Propagate any known information
1400 about BASE_IN into the new candidate. Return the new candidate. */
1402 static slsr_cand_t
1403 create_add_imm_cand (gimple *gs, tree base_in, const widest_int &index_in,
1404 bool speed)
1406 enum cand_kind kind = CAND_ADD;
1407 tree base = NULL_TREE, stride = NULL_TREE, ctype = NULL_TREE;
1408 tree stype = NULL_TREE;
1409 widest_int index, multiple;
1410 unsigned savings = 0;
1411 slsr_cand_t c;
1412 slsr_cand_t base_cand = base_cand_from_table (base_in);
1414 while (base_cand && !base && base_cand->kind != CAND_PHI)
1416 signop sign = TYPE_SIGN (TREE_TYPE (base_cand->stride));
1418 if (TREE_CODE (base_cand->stride) == INTEGER_CST
1419 && wi::multiple_of_p (index_in, wi::to_widest (base_cand->stride),
1420 sign, &multiple))
1422 /* Y = (B + i') * S, S constant, c = kS for some integer k
1423 X = Y + c
1424 ============================
1425 X = (B + (i'+ k)) * S
1427 Y = B + (i' * S), S constant, c = kS for some integer k
1428 X = Y + c
1429 ============================
1430 X = (B + (i'+ k)) * S */
1431 kind = base_cand->kind;
1432 base = base_cand->base_expr;
1433 index = base_cand->index + multiple;
1434 stride = base_cand->stride;
1435 ctype = base_cand->cand_type;
1436 stype = base_cand->stride_type;
1437 if (has_single_use (base_in))
1438 savings = (base_cand->dead_savings
1439 + stmt_cost (base_cand->cand_stmt, speed));
1442 if (base_cand->next_interp)
1443 base_cand = lookup_cand (base_cand->next_interp);
1444 else
1445 base_cand = NULL;
1448 if (!base)
1450 /* No interpretations had anything useful to propagate, so
1451 produce X = Y + (c * 1). */
1452 kind = CAND_ADD;
1453 base = base_in;
1454 index = index_in;
1455 stride = integer_one_node;
1456 ctype = TREE_TYPE (base_in);
1457 stype = sizetype;
1460 c = alloc_cand_and_find_basis (kind, gs, base, index, stride,
1461 ctype, stype, savings);
1462 return c;
1465 /* Given GS which is an add or subtract of scalar integers or pointers,
1466 make at least one appropriate entry in the candidate table. */
1468 static void
1469 slsr_process_add (gimple *gs, tree rhs1, tree rhs2, bool speed)
1471 bool subtract_p = gimple_assign_rhs_code (gs) == MINUS_EXPR;
1472 slsr_cand_t c = NULL, c2;
1474 if (TREE_CODE (rhs2) == SSA_NAME)
1476 /* First record an interpretation assuming RHS1 is the base expression
1477 and RHS2 is the stride. But it doesn't make sense for the
1478 stride to be a pointer, so don't record a candidate in that case. */
1479 if (!POINTER_TYPE_P (TREE_TYPE (rhs2)))
1481 c = create_add_ssa_cand (gs, rhs1, rhs2, subtract_p, speed);
1483 /* Add the first interpretation to the statement-candidate
1484 mapping. */
1485 add_cand_for_stmt (gs, c);
1488 /* If the two RHS operands are identical, or this is a subtract,
1489 we're done. */
1490 if (operand_equal_p (rhs1, rhs2, 0) || subtract_p)
1491 return;
1493 /* Otherwise, record another interpretation assuming RHS2 is the
1494 base expression and RHS1 is the stride, again provided that the
1495 stride is not a pointer. */
1496 if (!POINTER_TYPE_P (TREE_TYPE (rhs1)))
1498 c2 = create_add_ssa_cand (gs, rhs2, rhs1, false, speed);
1499 if (c)
1500 c->next_interp = c2->cand_num;
1501 else
1502 add_cand_for_stmt (gs, c2);
1505 else if (TREE_CODE (rhs2) == INTEGER_CST)
1507 /* Record an interpretation for the add-immediate. */
1508 widest_int index = wi::to_widest (rhs2);
1509 if (subtract_p)
1510 index = -index;
1512 c = create_add_imm_cand (gs, rhs1, index, speed);
1514 /* Add the interpretation to the statement-candidate mapping. */
1515 add_cand_for_stmt (gs, c);
1519 /* Given GS which is a negate of a scalar integer, make an appropriate
1520 entry in the candidate table. A negate is equivalent to a multiply
1521 by -1. */
1523 static void
1524 slsr_process_neg (gimple *gs, tree rhs1, bool speed)
1526 /* Record a CAND_MULT interpretation for the multiply by -1. */
1527 slsr_cand_t c = create_mul_imm_cand (gs, rhs1, integer_minus_one_node, speed);
1529 /* Add the interpretation to the statement-candidate mapping. */
1530 add_cand_for_stmt (gs, c);
1533 /* Helper function for legal_cast_p, operating on two trees. Checks
1534 whether it's allowable to cast from RHS to LHS. See legal_cast_p
1535 for more details. */
1537 static bool
1538 legal_cast_p_1 (tree lhs_type, tree rhs_type)
1540 unsigned lhs_size, rhs_size;
1541 bool lhs_wraps, rhs_wraps;
1543 lhs_size = TYPE_PRECISION (lhs_type);
1544 rhs_size = TYPE_PRECISION (rhs_type);
1545 lhs_wraps = ANY_INTEGRAL_TYPE_P (lhs_type) && TYPE_OVERFLOW_WRAPS (lhs_type);
1546 rhs_wraps = ANY_INTEGRAL_TYPE_P (rhs_type) && TYPE_OVERFLOW_WRAPS (rhs_type);
1548 if (lhs_size < rhs_size
1549 || (rhs_wraps && !lhs_wraps)
1550 || (rhs_wraps && lhs_wraps && rhs_size != lhs_size))
1551 return false;
1553 return true;
1556 /* Return TRUE if GS is a statement that defines an SSA name from
1557 a conversion and is legal for us to combine with an add and multiply
1558 in the candidate table. For example, suppose we have:
1560 A = B + i;
1561 C = (type) A;
1562 D = C * S;
1564 Without the type-cast, we would create a CAND_MULT for D with base B,
1565 index i, and stride S. We want to record this candidate only if
1566 applying the type cast after the multiply is equivalent:
1568 A = B + i;
1569 E = A * S;
1570 D = (type) E;
1572 We will record the type with the candidate for D. This allows us
1573 to use a similar previous candidate as a basis. If we have earlier seen
1575 A' = B + i';
1576 C' = (type) A';
1577 D' = C' * S;
1579 we can replace D with
1581 D = D' + (i - i') * S;
1583 But if moving the type-cast would change semantics, we mustn't do this.
1585 This is legitimate for casts from a non-wrapping integral type to
1586 any integral type of the same or larger size. It is not legitimate
1587 to convert a wrapping type to a non-wrapping type, or to a wrapping
1588 type of a different size. I.e., with a wrapping type, we must
1589 assume that the addition B + i could wrap, in which case performing
1590 the multiply before or after one of the "illegal" type casts will
1591 have different semantics. */
1593 static bool
1594 legal_cast_p (gimple *gs, tree rhs)
1596 if (!is_gimple_assign (gs)
1597 || !CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (gs)))
1598 return false;
1600 return legal_cast_p_1 (TREE_TYPE (gimple_assign_lhs (gs)), TREE_TYPE (rhs));
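/* Some concrete instances of the rules above, assuming the usual case
   where signed overflow is undefined and unsigned arithmetic wraps:
   a cast from 32-bit signed int to a 64-bit integer type is legal;
   a cast from 32-bit unsigned int to any 64-bit type is not, nor is
   a cast to any narrower type.  */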
1603 /* Given GS which is a cast to a scalar integer type, determine whether
1604 the cast is legal for strength reduction. If so, make at least one
1605 appropriate entry in the candidate table. */
1607 static void
1608 slsr_process_cast (gimple *gs, tree rhs1, bool speed)
1610 tree lhs, ctype;
1611 slsr_cand_t base_cand, c = NULL, c2;
1612 unsigned savings = 0;
1614 if (!legal_cast_p (gs, rhs1))
1615 return;
1617 lhs = gimple_assign_lhs (gs);
1618 base_cand = base_cand_from_table (rhs1);
1619 ctype = TREE_TYPE (lhs);
1621 if (base_cand && base_cand->kind != CAND_PHI)
1623 while (base_cand)
1625 /* Propagate all data from the base candidate except the type,
1626 which comes from the cast, and the base candidate's cast,
1627 which is no longer applicable. */
1628 if (has_single_use (rhs1))
1629 savings = (base_cand->dead_savings
1630 + stmt_cost (base_cand->cand_stmt, speed));
1632 c = alloc_cand_and_find_basis (base_cand->kind, gs,
1633 base_cand->base_expr,
1634 base_cand->index, base_cand->stride,
1635 ctype, base_cand->stride_type,
1636 savings);
1637 if (base_cand->next_interp)
1638 base_cand = lookup_cand (base_cand->next_interp);
1639 else
1640 base_cand = NULL;
1643 else
1645 /* If nothing is known about the RHS, create fresh CAND_ADD and
1646 CAND_MULT interpretations:
1648 X = Y + (0 * 1)
1649 X = (Y + 0) * 1
1651 The first of these is somewhat arbitrary, but the choice of
1652 1 for the stride simplifies the logic for propagating casts
1653 into their uses. */
1654 c = alloc_cand_and_find_basis (CAND_ADD, gs, rhs1, 0,
1655 integer_one_node, ctype, sizetype, 0);
1656 c2 = alloc_cand_and_find_basis (CAND_MULT, gs, rhs1, 0,
1657 integer_one_node, ctype, sizetype, 0);
1658 c->next_interp = c2->cand_num;
1661 /* Add the first (or only) interpretation to the statement-candidate
1662 mapping. */
1663 add_cand_for_stmt (gs, c);
1666 /* Given GS which is a copy of a scalar integer type, make at least one
1667 appropriate entry in the candidate table.
1669 This interface is included for completeness, but is unnecessary
1670 if this pass immediately follows a pass that performs copy
1671 propagation, such as DOM. */
1673 static void
1674 slsr_process_copy (gimple *gs, tree rhs1, bool speed)
1676 slsr_cand_t base_cand, c = NULL, c2;
1677 unsigned savings = 0;
1679 base_cand = base_cand_from_table (rhs1);
1681 if (base_cand && base_cand->kind != CAND_PHI)
1683 while (base_cand)
1685 /* Propagate all data from the base candidate. */
1686 if (has_single_use (rhs1))
1687 savings = (base_cand->dead_savings
1688 + stmt_cost (base_cand->cand_stmt, speed));
1690 c = alloc_cand_and_find_basis (base_cand->kind, gs,
1691 base_cand->base_expr,
1692 base_cand->index, base_cand->stride,
1693 base_cand->cand_type,
1694 base_cand->stride_type, savings);
1695 if (base_cand->next_interp)
1696 base_cand = lookup_cand (base_cand->next_interp);
1697 else
1698 base_cand = NULL;
1701 else
1703 /* If nothing is known about the RHS, create fresh CAND_ADD and
1704 CAND_MULT interpretations:
1706 X = Y + (0 * 1)
1707 X = (Y + 0) * 1
1709 The first of these is somewhat arbitrary, but the choice of
1710 1 for the stride simplifies the logic for propagating casts
1711 into their uses. */
1712 c = alloc_cand_and_find_basis (CAND_ADD, gs, rhs1, 0,
1713 integer_one_node, TREE_TYPE (rhs1),
1714 sizetype, 0);
1715 c2 = alloc_cand_and_find_basis (CAND_MULT, gs, rhs1, 0,
1716 integer_one_node, TREE_TYPE (rhs1),
1717 sizetype, 0);
1718 c->next_interp = c2->cand_num;
1721 /* Add the first (or only) interpretation to the statement-candidate
1722 mapping. */
1723 add_cand_for_stmt (gs, c);
1726 class find_candidates_dom_walker : public dom_walker
1728 public:
1729 find_candidates_dom_walker (cdi_direction direction)
1730 : dom_walker (direction) {}
1731 virtual edge before_dom_children (basic_block);
1734 /* Find strength-reduction candidates in block BB. */
1736 edge
1737 find_candidates_dom_walker::before_dom_children (basic_block bb)
1739 bool speed = optimize_bb_for_speed_p (bb);
1741 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
1742 gsi_next (&gsi))
1743 slsr_process_phi (gsi.phi (), speed);
1745 for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
1746 gsi_next (&gsi))
1748 gimple *gs = gsi_stmt (gsi);
1750 if (gimple_vuse (gs) && gimple_assign_single_p (gs))
1751 slsr_process_ref (gs);
1753 else if (is_gimple_assign (gs)
1754 && (INTEGRAL_TYPE_P (TREE_TYPE (gimple_assign_lhs (gs)))
1755 || POINTER_TYPE_P (TREE_TYPE (gimple_assign_lhs (gs)))))
1757 tree rhs1 = NULL_TREE, rhs2 = NULL_TREE;
1759 switch (gimple_assign_rhs_code (gs))
1761 case MULT_EXPR:
1762 case PLUS_EXPR:
1763 rhs1 = gimple_assign_rhs1 (gs);
1764 rhs2 = gimple_assign_rhs2 (gs);
1765 /* Should never happen, but currently some buggy situations
1766 in earlier phases put constants in rhs1. */
1767 if (TREE_CODE (rhs1) != SSA_NAME)
1768 continue;
1769 break;
1771 /* Possible future opportunity: rhs1 of a ptr+ can be
1772 an ADDR_EXPR. */
1773 case POINTER_PLUS_EXPR:
1774 case MINUS_EXPR:
1775 rhs2 = gimple_assign_rhs2 (gs);
1776 gcc_fallthrough ();
1778 CASE_CONVERT:
1779 case SSA_NAME:
1780 case NEGATE_EXPR:
1781 rhs1 = gimple_assign_rhs1 (gs);
1782 if (TREE_CODE (rhs1) != SSA_NAME)
1783 continue;
1784 break;
1786 default:
1790 switch (gimple_assign_rhs_code (gs))
1792 case MULT_EXPR:
1793 slsr_process_mul (gs, rhs1, rhs2, speed);
1794 break;
1796 case PLUS_EXPR:
1797 case POINTER_PLUS_EXPR:
1798 case MINUS_EXPR:
1799 slsr_process_add (gs, rhs1, rhs2, speed);
1800 break;
1802 case NEGATE_EXPR:
1803 slsr_process_neg (gs, rhs1, speed);
1804 break;
1806 CASE_CONVERT:
1807 slsr_process_cast (gs, rhs1, speed);
1808 break;
1810 case SSA_NAME:
1811 slsr_process_copy (gs, rhs1, speed);
1812 break;
1814 default:
1819 return NULL;
1822 /* Dump a candidate for debug. */
1824 static void
1825 dump_candidate (slsr_cand_t c)
1827 fprintf (dump_file, "%3d [%d] ", c->cand_num,
1828 gimple_bb (c->cand_stmt)->index);
1829 print_gimple_stmt (dump_file, c->cand_stmt, 0);
1830 switch (c->kind)
1832 case CAND_MULT:
1833 fputs (" MULT : (", dump_file);
1834 print_generic_expr (dump_file, c->base_expr);
1835 fputs (" + ", dump_file);
1836 print_decs (c->index, dump_file);
1837 fputs (") * ", dump_file);
1838 if (TREE_CODE (c->stride) != INTEGER_CST
1839 && c->stride_type != TREE_TYPE (c->stride))
1841 fputs ("(", dump_file);
1842 print_generic_expr (dump_file, c->stride_type);
1843 fputs (")", dump_file);
1845 print_generic_expr (dump_file, c->stride);
1846 fputs (" : ", dump_file);
1847 break;
1848 case CAND_ADD:
1849 fputs (" ADD : ", dump_file);
1850 print_generic_expr (dump_file, c->base_expr);
1851 fputs (" + (", dump_file);
1852 print_decs (c->index, dump_file);
1853 fputs (" * ", dump_file);
1854 if (TREE_CODE (c->stride) != INTEGER_CST
1855 && c->stride_type != TREE_TYPE (c->stride))
1857 fputs ("(", dump_file);
1858 print_generic_expr (dump_file, c->stride_type);
1859 fputs (")", dump_file);
1861 print_generic_expr (dump_file, c->stride);
1862 fputs (") : ", dump_file);
1863 break;
1864 case CAND_REF:
1865 fputs (" REF : ", dump_file);
1866 print_generic_expr (dump_file, c->base_expr);
1867 fputs (" + (", dump_file);
1868 print_generic_expr (dump_file, c->stride);
1869 fputs (") + ", dump_file);
1870 print_decs (c->index, dump_file);
1871 fputs (" : ", dump_file);
1872 break;
1873 case CAND_PHI:
1874 fputs (" PHI : ", dump_file);
1875 print_generic_expr (dump_file, c->base_expr);
1876 fputs (" + (unknown * ", dump_file);
1877 print_generic_expr (dump_file, c->stride);
1878 fputs (") : ", dump_file);
1879 break;
1880 default:
1881 gcc_unreachable ();
1883 print_generic_expr (dump_file, c->cand_type);
1884 fprintf (dump_file, "\n basis: %d dependent: %d sibling: %d\n",
1885 c->basis, c->dependent, c->sibling);
1886 fprintf (dump_file, " next-interp: %d dead-savings: %d\n",
1887 c->next_interp, c->dead_savings);
1888 if (c->def_phi)
1889 fprintf (dump_file, " phi: %d\n", c->def_phi);
1890 fputs ("\n", dump_file);
1893 /* Dump the candidate vector for debug. */
1895 static void
1896 dump_cand_vec (void)
1898 unsigned i;
1899 slsr_cand_t c;
1901 fprintf (dump_file, "\nStrength reduction candidate vector:\n\n");
1903 FOR_EACH_VEC_ELT (cand_vec, i, c)
1904 dump_candidate (c);
1907 /* Callback used to dump the candidate chains hash table. */
1909 static int
1910 ssa_base_cand_dump_callback (cand_chain **slot, void *ignored ATTRIBUTE_UNUSED)
1912 const_cand_chain_t chain = *slot;
1913 cand_chain_t p;
1915 print_generic_expr (dump_file, chain->base_expr);
1916 fprintf (dump_file, " -> %d", chain->cand->cand_num);
1918 for (p = chain->next; p; p = p->next)
1919 fprintf (dump_file, " -> %d", p->cand->cand_num);
1921 fputs ("\n", dump_file);
1922 return 1;
1925 /* Dump the candidate chains. */
1927 static void
1928 dump_cand_chains (void)
1930 fprintf (dump_file, "\nStrength reduction candidate chains:\n\n");
1931 base_cand_map->traverse_noresize <void *, ssa_base_cand_dump_callback>
1932 (NULL);
1933 fputs ("\n", dump_file);
1936 /* Dump the increment vector for debug. */
1938 static void
1939 dump_incr_vec (void)
1941 if (dump_file && (dump_flags & TDF_DETAILS))
1943 unsigned i;
1945 fprintf (dump_file, "\nIncrement vector:\n\n");
1947 for (i = 0; i < incr_vec_len; i++)
1949 fprintf (dump_file, "%3d increment: ", i);
1950 print_decs (incr_vec[i].incr, dump_file);
1951 fprintf (dump_file, "\n count: %d", incr_vec[i].count);
1952 fprintf (dump_file, "\n cost: %d", incr_vec[i].cost);
1953 fputs ("\n initializer: ", dump_file);
1954 print_generic_expr (dump_file, incr_vec[i].initializer);
1955 fputs ("\n\n", dump_file);
1960 /* Replace *EXPR in candidate C with an equivalent strength-reduced
1961 data reference. */
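/* For illustration (names invented): a CAND_REF with base_expr p_1,
   stride t_2 and constant index 8 is rewritten below as a MEM_REF
   whose address is p_1 + t_2 (gimplified to a single SSA name) and
   whose constant offset operand is 8, so related references can share
   the addressing computation. */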
1963 static void
1964 replace_ref (tree *expr, slsr_cand_t c)
1966 tree add_expr, mem_ref, acc_type = TREE_TYPE (*expr);
1967 unsigned HOST_WIDE_INT misalign;
1968 unsigned align;
1970 /* Ensure the memory reference carries the minimum alignment
1971 requirement for the data type. See PR58041. */
1972 get_object_alignment_1 (*expr, &align, &misalign);
1973 if (misalign != 0)
1974 align = least_bit_hwi (misalign);
1975 if (align < TYPE_ALIGN (acc_type))
1976 acc_type = build_aligned_type (acc_type, align);
1978 add_expr = fold_build2 (POINTER_PLUS_EXPR, c->cand_type,
1979 c->base_expr, c->stride);
1980 mem_ref = fold_build2 (MEM_REF, acc_type, add_expr,
1981 wide_int_to_tree (c->cand_type, c->index));
1983 /* Gimplify the base addressing expression for the new MEM_REF tree. */
1984 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
1985 TREE_OPERAND (mem_ref, 0)
1986 = force_gimple_operand_gsi (&gsi, TREE_OPERAND (mem_ref, 0),
1987 /*simple_p=*/true, NULL,
1988 /*before=*/true, GSI_SAME_STMT);
1989 copy_ref_info (mem_ref, *expr);
1990 *expr = mem_ref;
1991 update_stmt (c->cand_stmt);
1994 /* Replace CAND_REF candidate C, each sibling of candidate C, and each
1995 dependent of candidate C with an equivalent strength-reduced data
1996 reference. */
1998 static void
1999 replace_refs (slsr_cand_t c)
2001 if (dump_file && (dump_flags & TDF_DETAILS))
2003 fputs ("Replacing reference: ", dump_file);
2004 print_gimple_stmt (dump_file, c->cand_stmt, 0);
2007 if (gimple_vdef (c->cand_stmt))
2009 tree *lhs = gimple_assign_lhs_ptr (c->cand_stmt);
2010 replace_ref (lhs, c);
2012 else
2014 tree *rhs = gimple_assign_rhs1_ptr (c->cand_stmt);
2015 replace_ref (rhs, c);
2018 if (dump_file && (dump_flags & TDF_DETAILS))
2020 fputs ("With: ", dump_file);
2021 print_gimple_stmt (dump_file, c->cand_stmt, 0);
2022 fputs ("\n", dump_file);
2025 if (c->sibling)
2026 replace_refs (lookup_cand (c->sibling));
2028 if (c->dependent)
2029 replace_refs (lookup_cand (c->dependent));
2032 /* Return TRUE if candidate C is dependent upon a PHI. */
2034 static bool
2035 phi_dependent_cand_p (slsr_cand_t c)
2037 /* A candidate is not necessarily dependent upon a PHI just because
2038 it has a phi definition for its base name. It may have a basis
2039 that relies upon the same phi definition, in which case the PHI
2040 is irrelevant to this candidate. */
2041 return (c->def_phi
2042 && c->basis
2043 && lookup_cand (c->basis)->def_phi != c->def_phi);
2046 /* Calculate the increment required for candidate C relative to
2047 its basis. */
2049 static widest_int
2050 cand_increment (slsr_cand_t c)
2052 slsr_cand_t basis;
2054 /* If the candidate doesn't have a basis, just return its own
2055 index. This is useful in record_increments to help us find
2056 an existing initializer. Also, if the candidate's basis is
2057 hidden by a phi, then its own index will be the increment
2058 from the newly introduced phi basis. */
2059 if (!c->basis || phi_dependent_cand_p (c))
2060 return c->index;
2062 basis = lookup_cand (c->basis);
2063 gcc_assert (operand_equal_p (c->base_expr, basis->base_expr, 0));
2064 return c->index - basis->index;
2067 /* Calculate the increment required for candidate C relative to
2068 its basis. If we aren't going to generate pointer arithmetic
2069 for this candidate, return the absolute value of that increment
2070 instead. */
2072 static inline widest_int
2073 cand_abs_increment (slsr_cand_t c)
2075 widest_int increment = cand_increment (c);
2077 if (!address_arithmetic_p && wi::neg_p (increment))
2078 increment = -increment;
2080 return increment;
2083 /* Return TRUE iff candidate C has already been replaced under
2084 another interpretation. */
2086 static inline bool
2087 cand_already_replaced (slsr_cand_t c)
2089 return (gimple_bb (c->cand_stmt) == 0);
2092 /* Common logic used by replace_unconditional_candidate and
2093 replace_conditional_candidate. */
2095 static void
2096 replace_mult_candidate (slsr_cand_t c, tree basis_name, widest_int bump)
2098 tree target_type = TREE_TYPE (gimple_assign_lhs (c->cand_stmt));
2099 enum tree_code cand_code = gimple_assign_rhs_code (c->cand_stmt);
2101 /* It is not useful to replace casts, copies, negates, or adds of
2102 an SSA name and a constant. */
2103 if (cand_code == SSA_NAME
2104 || CONVERT_EXPR_CODE_P (cand_code)
2105 || cand_code == PLUS_EXPR
2106 || cand_code == POINTER_PLUS_EXPR
2107 || cand_code == MINUS_EXPR
2108 || cand_code == NEGATE_EXPR)
2109 return;
2111 enum tree_code code = PLUS_EXPR;
2112 tree bump_tree;
2113 gimple *stmt_to_print = NULL;
2115 if (wi::neg_p (bump))
2117 code = MINUS_EXPR;
2118 bump = -bump;
2121 /* It is possible that the resulting bump doesn't fit in target_type.
2122 Abandon the replacement in this case. This does not affect
2123 siblings or dependents of C. */
2124 if (bump != wi::ext (bump, TYPE_PRECISION (target_type),
2125 TYPE_SIGN (target_type)))
2126 return;
2128 bump_tree = wide_int_to_tree (target_type, bump);
2130 /* If the basis name and the candidate's LHS have incompatible types,
2131 introduce a cast. */
2132 if (!useless_type_conversion_p (target_type, TREE_TYPE (basis_name)))
2133 basis_name = introduce_cast_before_cand (c, target_type, basis_name);
2135 if (dump_file && (dump_flags & TDF_DETAILS))
2137 fputs ("Replacing: ", dump_file);
2138 print_gimple_stmt (dump_file, c->cand_stmt, 0);
2141 if (bump == 0)
2143 tree lhs = gimple_assign_lhs (c->cand_stmt);
2144 gassign *copy_stmt = gimple_build_assign (lhs, basis_name);
2145 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
2146 slsr_cand_t cc = c;
2147 gimple_set_location (copy_stmt, gimple_location (c->cand_stmt));
2148 gsi_replace (&gsi, copy_stmt, false);
2149 c->cand_stmt = copy_stmt;
2150 while (cc->next_interp)
2152 cc = lookup_cand (cc->next_interp);
2153 cc->cand_stmt = copy_stmt;
2155 if (dump_file && (dump_flags & TDF_DETAILS))
2156 stmt_to_print = copy_stmt;
2158 else
2160 tree rhs1, rhs2;
2161 if (cand_code != NEGATE_EXPR) {
2162 rhs1 = gimple_assign_rhs1 (c->cand_stmt);
2163 rhs2 = gimple_assign_rhs2 (c->cand_stmt);
2165 if (cand_code != NEGATE_EXPR
2166 && ((operand_equal_p (rhs1, basis_name, 0)
2167 && operand_equal_p (rhs2, bump_tree, 0))
2168 || (operand_equal_p (rhs1, bump_tree, 0)
2169 && operand_equal_p (rhs2, basis_name, 0))))
2171 if (dump_file && (dump_flags & TDF_DETAILS))
2173 fputs ("(duplicate, not actually replacing)", dump_file);
2174 stmt_to_print = c->cand_stmt;
2177 else
2179 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
2180 slsr_cand_t cc = c;
2181 gimple_assign_set_rhs_with_ops (&gsi, code, basis_name, bump_tree);
2182 update_stmt (gsi_stmt (gsi));
2183 c->cand_stmt = gsi_stmt (gsi);
2184 while (cc->next_interp)
2186 cc = lookup_cand (cc->next_interp);
2187 cc->cand_stmt = gsi_stmt (gsi);
2189 if (dump_file && (dump_flags & TDF_DETAILS))
2190 stmt_to_print = gsi_stmt (gsi);
2194 if (dump_file && (dump_flags & TDF_DETAILS))
2196 fputs ("With: ", dump_file);
2197 print_gimple_stmt (dump_file, stmt_to_print, 0);
2198 fputs ("\n", dump_file);
2202 /* Replace candidate C with an add or subtract. Note that we only
2203 operate on CAND_MULTs with known strides, so we will never generate
2204 a POINTER_PLUS_EXPR. Each candidate X = (B + i) * S is replaced by
2205 X = Y + ((i - i') * S), as described in the module commentary. The
2206 folded value ((i - i') * S) is referred to here as the "bump." */
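/* Illustrative numbers (not from the sources): for a basis
   Y = (B + 2) * S and candidate X = (B + 7) * S with constant S = 4,
   the increment is 7 - 2 = 5, the bump is 5 * 4 = 20, and the
   candidate statement is rewritten as X = Y + 20. */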
2208 static void
2209 replace_unconditional_candidate (slsr_cand_t c)
2211 slsr_cand_t basis;
2213 if (cand_already_replaced (c))
2214 return;
2216 basis = lookup_cand (c->basis);
2217 widest_int bump = cand_increment (c) * wi::to_widest (c->stride);
2219 replace_mult_candidate (c, gimple_assign_lhs (basis->cand_stmt), bump);
2222 /* Return the index in the increment vector of the given INCREMENT,
2223 or -1 if not found. The latter can occur if more than
2224 MAX_INCR_VEC_LEN increments have been found. */
2226 static inline int
2227 incr_vec_index (const widest_int &increment)
2229 unsigned i;
2231 for (i = 0; i < incr_vec_len && increment != incr_vec[i].incr; i++)
2234 if (i < incr_vec_len)
2235 return i;
2236 else
2237 return -1;
2240 /* Create a new statement along edge E to add BASIS_NAME to the product
2241 of INCREMENT and the stride of candidate C. Create and return a new
2242 SSA name to be used as the LHS of the new statement.
2243 KNOWN_STRIDE is true iff C's stride is a constant. */
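/* Sketch of the emitted statement (SSA names invented): for a known
   constant stride of 4 and an increment of -3, the statement queued
   on edge E is

     slsr_1 = basis_2 - 12;

   For an unknown stride, either the recorded initializer for the
   increment or the stride itself (for increments of 1 or -1) is
   added to or subtracted from BASIS_NAME. */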
2245 static tree
2246 create_add_on_incoming_edge (slsr_cand_t c, tree basis_name,
2247 widest_int increment, edge e, location_t loc,
2248 bool known_stride)
2250 tree lhs, basis_type;
2251 gassign *new_stmt, *cast_stmt = NULL;
2253 /* If the add candidate along this incoming edge has the same
2254 index as C's hidden basis, the hidden basis represents this
2255 edge correctly. */
2256 if (increment == 0)
2257 return basis_name;
2259 basis_type = TREE_TYPE (basis_name);
2260 lhs = make_temp_ssa_name (basis_type, NULL, "slsr");
2262 /* Occasionally people convert integers to pointers without a
2263 cast, leading us into trouble if we aren't careful. */
2264 enum tree_code plus_code
2265 = POINTER_TYPE_P (basis_type) ? POINTER_PLUS_EXPR : PLUS_EXPR;
2267 if (known_stride)
2269 tree bump_tree;
2270 enum tree_code code = plus_code;
2271 widest_int bump = increment * wi::to_widest (c->stride);
2272 if (wi::neg_p (bump) && !POINTER_TYPE_P (basis_type))
2274 code = MINUS_EXPR;
2275 bump = -bump;
2278 tree stride_type = POINTER_TYPE_P (basis_type) ? sizetype : basis_type;
2279 bump_tree = wide_int_to_tree (stride_type, bump);
2280 new_stmt = gimple_build_assign (lhs, code, basis_name, bump_tree);
2282 else
2284 int i;
2285 bool negate_incr = !POINTER_TYPE_P (basis_type) && wi::neg_p (increment);
2286 i = incr_vec_index (negate_incr ? -increment : increment);
2287 gcc_assert (i >= 0);
2289 if (incr_vec[i].initializer)
2291 enum tree_code code = negate_incr ? MINUS_EXPR : plus_code;
2292 new_stmt = gimple_build_assign (lhs, code, basis_name,
2293 incr_vec[i].initializer);
2295 else {
2296 tree stride;
2298 if (!types_compatible_p (TREE_TYPE (c->stride), c->stride_type))
2300 tree cast_stride = make_temp_ssa_name (c->stride_type, NULL,
2301 "slsr");
2302 cast_stmt = gimple_build_assign (cast_stride, NOP_EXPR,
2303 c->stride);
2304 stride = cast_stride;
2306 else
2307 stride = c->stride;
2309 if (increment == 1)
2310 new_stmt = gimple_build_assign (lhs, plus_code, basis_name, stride);
2311 else if (increment == -1)
2312 new_stmt = gimple_build_assign (lhs, MINUS_EXPR, basis_name, stride);
2313 else
2314 gcc_unreachable ();
2318 if (cast_stmt)
2320 gimple_set_location (cast_stmt, loc);
2321 gsi_insert_on_edge (e, cast_stmt);
2324 gimple_set_location (new_stmt, loc);
2325 gsi_insert_on_edge (e, new_stmt);
2327 if (dump_file && (dump_flags & TDF_DETAILS))
2329 if (cast_stmt)
2331 fprintf (dump_file, "Inserting cast on edge %d->%d: ",
2332 e->src->index, e->dest->index);
2333 print_gimple_stmt (dump_file, cast_stmt, 0);
2335 fprintf (dump_file, "Inserting on edge %d->%d: ", e->src->index,
2336 e->dest->index);
2337 print_gimple_stmt (dump_file, new_stmt, 0);
2340 return lhs;
2343 /* Clear the visited field for a tree of PHI candidates. */
2345 static void
2346 clear_visited (gphi *phi)
2348 unsigned i;
2349 slsr_cand_t phi_cand = *stmt_cand_map->get (phi);
2351 if (phi_cand->visited)
2353 phi_cand->visited = 0;
2355 for (i = 0; i < gimple_phi_num_args (phi); i++)
2357 tree arg = gimple_phi_arg_def (phi, i);
2358 gimple *arg_def = SSA_NAME_DEF_STMT (arg);
2359 if (gimple_code (arg_def) == GIMPLE_PHI)
2360 clear_visited (as_a <gphi *> (arg_def));
2365 /* Recursive helper function for create_phi_basis. */
2367 static tree
2368 create_phi_basis_1 (slsr_cand_t c, gimple *from_phi, tree basis_name,
2369 location_t loc, bool known_stride)
2371 int i;
2372 tree name, phi_arg;
2373 gphi *phi;
2374 slsr_cand_t basis = lookup_cand (c->basis);
2375 int nargs = gimple_phi_num_args (from_phi);
2376 basic_block phi_bb = gimple_bb (from_phi);
2377 slsr_cand_t phi_cand = *stmt_cand_map->get (from_phi);
2378 auto_vec<tree> phi_args (nargs);
2380 if (phi_cand->visited)
2381 return phi_cand->cached_basis;
2382 phi_cand->visited = 1;
2384 /* Process each argument of the existing phi that represents
2385 conditionally-executed add candidates. */
2386 for (i = 0; i < nargs; i++)
2388 edge e = (*phi_bb->preds)[i];
2389 tree arg = gimple_phi_arg_def (from_phi, i);
2390 tree feeding_def;
2392 /* If the phi argument is the base name of the CAND_PHI, then
2393 this incoming arc should use the hidden basis. */
2394 if (operand_equal_p (arg, phi_cand->base_expr, 0))
2395 if (basis->index == 0)
2396 feeding_def = gimple_assign_lhs (basis->cand_stmt);
2397 else
2399 widest_int incr = -basis->index;
2400 feeding_def = create_add_on_incoming_edge (c, basis_name, incr,
2401 e, loc, known_stride);
2403 else
2405 gimple *arg_def = SSA_NAME_DEF_STMT (arg);
2407 /* If there is another phi along this incoming edge, we must
2408 process it in the same fashion to ensure that all basis
2409 adjustments are made along its incoming edges. */
2410 if (gimple_code (arg_def) == GIMPLE_PHI)
2411 feeding_def = create_phi_basis_1 (c, arg_def, basis_name,
2412 loc, known_stride);
2413 else
2415 slsr_cand_t arg_cand = base_cand_from_table (arg);
2416 widest_int diff = arg_cand->index - basis->index;
2417 feeding_def = create_add_on_incoming_edge (c, basis_name, diff,
2418 e, loc, known_stride);
2422 /* Because of recursion, we need to save the arguments in a vector
2423 so we can create the PHI statement all at once. Otherwise the
2424 storage for the half-created PHI can be reclaimed. */
2425 phi_args.safe_push (feeding_def);
2428 /* Create the new phi basis. */
2429 name = make_temp_ssa_name (TREE_TYPE (basis_name), NULL, "slsr");
2430 phi = create_phi_node (name, phi_bb);
2431 SSA_NAME_DEF_STMT (name) = phi;
2433 FOR_EACH_VEC_ELT (phi_args, i, phi_arg)
2435 edge e = (*phi_bb->preds)[i];
2436 add_phi_arg (phi, phi_arg, e, loc);
2439 update_stmt (phi);
2441 if (dump_file && (dump_flags & TDF_DETAILS))
2443 fputs ("Introducing new phi basis: ", dump_file);
2444 print_gimple_stmt (dump_file, phi, 0);
2447 phi_cand->cached_basis = name;
2448 return name;
2451 /* Given a candidate C with BASIS_NAME being the LHS of C's basis which
2452 is hidden by the phi node FROM_PHI, create a new phi node in the same
2453 block as FROM_PHI. The new phi is suitable for use as a basis by C,
2454 with its phi arguments representing conditional adjustments to the
2455 hidden basis along conditional incoming paths. Those adjustments are
2456 made by creating add statements (and sometimes recursively creating
2457 phis) along those incoming paths. LOC is the location to attach to
2458 the introduced statements. KNOWN_STRIDE is true iff C's stride is a
2459 constant. */
2461 static tree
2462 create_phi_basis (slsr_cand_t c, gimple *from_phi, tree basis_name,
2463 location_t loc, bool known_stride)
2465 tree retval = create_phi_basis_1 (c, from_phi, basis_name, loc,
2466 known_stride);
2467 gcc_assert (retval);
2468 clear_visited (as_a <gphi *> (from_phi));
2469 return retval;
2472 /* Given a candidate C whose basis is hidden by at least one intervening
2473 phi, introduce a matching number of new phis to represent its basis
2474 adjusted by conditional increments along possible incoming paths. Then
2475 replace C as though it were an unconditional candidate, using the new
2476 basis. */
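/* Conceptual shape of the transformation (all names and values are
   invented for this comment):

     x_1 = y_0 * 4;                  <-- basis
     if (...)
       t_2 = y_0 + 1;
     p_3 = PHI <y_0, t_2>;
     x_4 = p_3 * 4;                  <-- candidate hidden by the phi

   becomes

     x_1 = y_0 * 4;
     if (...)
       {
	 t_2 = y_0 + 1;
	 slsr_5 = x_1 + 4;           <-- inserted on the incoming edge
       }
     slsr_6 = PHI <x_1, slsr_5>;     <-- new basis phi
     x_4 = slsr_6;                   <-- bump of 0 becomes a copy

   replacing the second multiply with a conditional add and a copy. */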
2478 static void
2479 replace_conditional_candidate (slsr_cand_t c)
2481 tree basis_name, name;
2482 slsr_cand_t basis;
2483 location_t loc;
2485 /* Look up the LHS SSA name from C's basis. This will be the
2486 RHS1 of the adds we will introduce to create new phi arguments. */
2487 basis = lookup_cand (c->basis);
2488 basis_name = gimple_assign_lhs (basis->cand_stmt);
2490 /* Create a new phi statement which will represent C's true basis
2491 after the transformation is complete. */
2492 loc = gimple_location (c->cand_stmt);
2493 name = create_phi_basis (c, lookup_cand (c->def_phi)->cand_stmt,
2494 basis_name, loc, KNOWN_STRIDE);
2496 /* Replace C with an add of the new basis phi and a constant. */
2497 widest_int bump = c->index * wi::to_widest (c->stride);
2499 replace_mult_candidate (c, name, bump);
2502 /* Recursive helper function for phi_add_costs. SPREAD is a measure of
2503 how many PHI nodes we have visited at this point in the tree walk. */
2505 static int
2506 phi_add_costs_1 (gimple *phi, slsr_cand_t c, int one_add_cost, int *spread)
2508 unsigned i;
2509 int cost = 0;
2510 slsr_cand_t phi_cand = *stmt_cand_map->get (phi);
2512 if (phi_cand->visited)
2513 return 0;
2515 phi_cand->visited = 1;
2516 (*spread)++;
2518 /* If we work our way back to a phi that isn't dominated by the hidden
2519 basis, this isn't a candidate for replacement. Indicate this by
2520 returning an unreasonably high cost. It's not easy to detect
2521 these situations when determining the basis, so we defer the
2522 decision until now. */
2523 basic_block phi_bb = gimple_bb (phi);
2524 slsr_cand_t basis = lookup_cand (c->basis);
2525 basic_block basis_bb = gimple_bb (basis->cand_stmt);
2527 if (phi_bb == basis_bb || !dominated_by_p (CDI_DOMINATORS, phi_bb, basis_bb))
2528 return COST_INFINITE;
2530 for (i = 0; i < gimple_phi_num_args (phi); i++)
2532 tree arg = gimple_phi_arg_def (phi, i);
2534 if (arg != phi_cand->base_expr)
2536 gimple *arg_def = SSA_NAME_DEF_STMT (arg);
2538 if (gimple_code (arg_def) == GIMPLE_PHI)
2540 cost += phi_add_costs_1 (arg_def, c, one_add_cost, spread);
2542 if (cost >= COST_INFINITE || *spread > MAX_SPREAD)
2543 return COST_INFINITE;
2545 else
2547 slsr_cand_t arg_cand = base_cand_from_table (arg);
2549 if (arg_cand->index != c->index)
2550 cost += one_add_cost;
2555 return cost;
2558 /* Compute the expected costs of inserting basis adjustments for
2559 candidate C with phi-definition PHI. The cost of inserting
2560 one adjustment is given by ONE_ADD_COST. If PHI has arguments
2561 which are themselves phi results, recursively calculate costs
2562 for those phis as well. */
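/* Rough illustration (numbers invented): for a phi with two incoming
   add candidates, one whose index equals C's index and one that
   differs, the estimate is a single ONE_ADD_COST; an argument fed by
   another phi is costed recursively, and COST_INFINITE is returned
   if any visited phi is not dominated by C's hidden basis. */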
2564 static int
2565 phi_add_costs (gimple *phi, slsr_cand_t c, int one_add_cost)
2567 int spread = 0;
2568 int retval = phi_add_costs_1 (phi, c, one_add_cost, &spread);
2569 clear_visited (as_a <gphi *> (phi));
2570 return retval;
2572 /* For candidate C, each sibling of candidate C, and each dependent of
2573 candidate C, determine whether the candidate is dependent upon a
2574 phi that hides its basis. If not, replace the candidate unconditionally.
2575 Otherwise, determine whether the cost of introducing compensation code
2576 for the candidate is offset by the gains from strength reduction. If
2577 so, replace the candidate and introduce the compensation code. */
2579 static void
2580 replace_uncond_cands_and_profitable_phis (slsr_cand_t c)
2582 if (phi_dependent_cand_p (c))
2584 /* A multiply candidate with a stride of 1 is just an artifice
2585 of a copy or cast; there is no value in replacing it. */
2586 if (c->kind == CAND_MULT && wi::to_widest (c->stride) != 1)
2588 /* A candidate dependent upon a phi will replace a multiply by
2589 a constant with an add, and will insert at most one add for
2590 each phi argument. Add these costs with the potential dead-code
2591 savings to determine profitability. */
2592 bool speed = optimize_bb_for_speed_p (gimple_bb (c->cand_stmt));
2593 int mult_savings = stmt_cost (c->cand_stmt, speed);
2594 gimple *phi = lookup_cand (c->def_phi)->cand_stmt;
2595 tree phi_result = gimple_phi_result (phi);
2596 int one_add_cost = add_cost (speed,
2597 TYPE_MODE (TREE_TYPE (phi_result)));
2598 int add_costs = one_add_cost + phi_add_costs (phi, c, one_add_cost);
2599 int cost = add_costs - mult_savings - c->dead_savings;
2601 if (dump_file && (dump_flags & TDF_DETAILS))
2603 fprintf (dump_file, " Conditional candidate %d:\n", c->cand_num);
2604 fprintf (dump_file, " add_costs = %d\n", add_costs);
2605 fprintf (dump_file, " mult_savings = %d\n", mult_savings);
2606 fprintf (dump_file, " dead_savings = %d\n", c->dead_savings);
2607 fprintf (dump_file, " cost = %d\n", cost);
2608 if (cost <= COST_NEUTRAL)
2609 fputs (" Replacing...\n", dump_file);
2610 else
2611 fputs (" Not replaced.\n", dump_file);
2614 if (cost <= COST_NEUTRAL)
2615 replace_conditional_candidate (c);
2618 else
2619 replace_unconditional_candidate (c);
2621 if (c->sibling)
2622 replace_uncond_cands_and_profitable_phis (lookup_cand (c->sibling));
2624 if (c->dependent)
2625 replace_uncond_cands_and_profitable_phis (lookup_cand (c->dependent));
2628 /* Count the number of candidates in the tree rooted at C that have
2629 not already been replaced under other interpretations. */
2631 static int
2632 count_candidates (slsr_cand_t c)
2634 unsigned count = cand_already_replaced (c) ? 0 : 1;
2636 if (c->sibling)
2637 count += count_candidates (lookup_cand (c->sibling));
2639 if (c->dependent)
2640 count += count_candidates (lookup_cand (c->dependent));
2642 return count;
2645 /* Increase the count of INCREMENT by one in the increment vector.
2646 INCREMENT is associated with candidate C. If INCREMENT is to be
2647 conditionally executed as part of a conditional candidate replacement,
2648 IS_PHI_ADJUST is true, otherwise false. If an initializer
2649 T_0 = stride * I is provided by a candidate that dominates all
2650 candidates with the same increment, also record T_0 for subsequent use. */
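/* Example (SSA names invented): given

     t_4 = s_2 * 4;
     x_3 = y_1 + t_4;    interpreted as CAND_ADD: y_1 + (4 * s_2)

   with increment 4, t_4 already computes stride * increment, so it is
   recorded as the initializer for increment 4, provided its defining
   block dominates every candidate that uses that increment. */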
2652 static void
2653 record_increment (slsr_cand_t c, widest_int increment, bool is_phi_adjust)
2655 bool found = false;
2656 unsigned i;
2658 /* Treat increments that differ only in sign as identical so as to
2659 share initializers, unless we are generating pointer arithmetic. */
2660 if (!address_arithmetic_p && wi::neg_p (increment))
2661 increment = -increment;
2663 for (i = 0; i < incr_vec_len; i++)
2665 if (incr_vec[i].incr == increment)
2667 incr_vec[i].count++;
2668 found = true;
2670 /* If we previously recorded an initializer that doesn't
2671 dominate this candidate, it's not going to be useful to
2672 us after all. */
2673 if (incr_vec[i].initializer
2674 && !dominated_by_p (CDI_DOMINATORS,
2675 gimple_bb (c->cand_stmt),
2676 incr_vec[i].init_bb))
2678 incr_vec[i].initializer = NULL_TREE;
2679 incr_vec[i].init_bb = NULL;
2682 break;
2686 if (!found && incr_vec_len < MAX_INCR_VEC_LEN - 1)
2688 /* The first time we see an increment, create the entry for it.
2689 If this is the root candidate which doesn't have a basis, set
2690 the count to zero. We're only processing it so it can possibly
2691 provide an initializer for other candidates. */
2692 incr_vec[incr_vec_len].incr = increment;
2693 incr_vec[incr_vec_len].count = c->basis || is_phi_adjust ? 1 : 0;
2694 incr_vec[incr_vec_len].cost = COST_INFINITE;
2696 /* Optimistically record the first occurrence of this increment
2697 as providing an initializer (if it does); we will revise this
2698 opinion later if it doesn't dominate all other occurrences.
2699 Exception: increments of 0, 1 never need initializers;
2700 and phi adjustments don't ever provide initializers. */
2701 if (c->kind == CAND_ADD
2702 && !is_phi_adjust
2703 && c->index == increment
2704 && (increment > 1 || increment < 0)
2705 && (gimple_assign_rhs_code (c->cand_stmt) == PLUS_EXPR
2706 || gimple_assign_rhs_code (c->cand_stmt) == POINTER_PLUS_EXPR))
2708 tree t0 = NULL_TREE;
2709 tree rhs1 = gimple_assign_rhs1 (c->cand_stmt);
2710 tree rhs2 = gimple_assign_rhs2 (c->cand_stmt);
2711 if (operand_equal_p (rhs1, c->base_expr, 0))
2712 t0 = rhs2;
2713 else if (operand_equal_p (rhs2, c->base_expr, 0))
2714 t0 = rhs1;
2715 if (t0
2716 && SSA_NAME_DEF_STMT (t0)
2717 && gimple_bb (SSA_NAME_DEF_STMT (t0)))
2719 incr_vec[incr_vec_len].initializer = t0;
2720 incr_vec[incr_vec_len++].init_bb
2721 = gimple_bb (SSA_NAME_DEF_STMT (t0));
2723 else
2725 incr_vec[incr_vec_len].initializer = NULL_TREE;
2726 incr_vec[incr_vec_len++].init_bb = NULL;
2729 else
2731 incr_vec[incr_vec_len].initializer = NULL_TREE;
2732 incr_vec[incr_vec_len++].init_bb = NULL;
2737 /* Recursive helper function for record_phi_increments. */
2739 static void
2740 record_phi_increments_1 (slsr_cand_t basis, gimple *phi)
2742 unsigned i;
2743 slsr_cand_t phi_cand = *stmt_cand_map->get (phi);
2745 if (phi_cand->visited)
2746 return;
2747 phi_cand->visited = 1;
2749 for (i = 0; i < gimple_phi_num_args (phi); i++)
2751 tree arg = gimple_phi_arg_def (phi, i);
2753 if (!operand_equal_p (arg, phi_cand->base_expr, 0))
2755 gimple *arg_def = SSA_NAME_DEF_STMT (arg);
2757 if (gimple_code (arg_def) == GIMPLE_PHI)
2758 record_phi_increments_1 (basis, arg_def);
2759 else
2761 slsr_cand_t arg_cand = base_cand_from_table (arg);
2762 widest_int diff = arg_cand->index - basis->index;
2763 record_increment (arg_cand, diff, PHI_ADJUST);
2769 /* Given phi statement PHI that hides a candidate from its BASIS, find
2770 the increments along each incoming arc (recursively handling additional
2771 phis that may be present) and record them. These increments are the
2772 difference in index between the index-adjusting statements and the
2773 index of the basis. */
2775 static void
2776 record_phi_increments (slsr_cand_t basis, gimple *phi)
2778 record_phi_increments_1 (basis, phi);
2779 clear_visited (as_a <gphi *> (phi));
2782 /* Determine how many times each unique increment occurs in the set
2783 of candidates rooted at C's parent, recording the data in the
2784 increment vector. For each unique increment I, if an initializer
2785 T_0 = stride * I is provided by a candidate that dominates all
2786 candidates with the same increment, also record T_0 for subsequent
2787 use. */
2789 static void
2790 record_increments (slsr_cand_t c)
2792 if (!cand_already_replaced (c))
2794 if (!phi_dependent_cand_p (c))
2795 record_increment (c, cand_increment (c), NOT_PHI_ADJUST);
2796 else
2798 /* A candidate with a basis hidden by a phi will have one
2799 increment for its relationship to the index represented by
2800 the phi, and potentially additional increments along each
2801 incoming edge. For the root of the dependency tree (which
2802 has no basis), process just the initial index in case it has
2803 an initializer that can be used by subsequent candidates. */
2804 record_increment (c, c->index, NOT_PHI_ADJUST);
2806 if (c->basis)
2807 record_phi_increments (lookup_cand (c->basis),
2808 lookup_cand (c->def_phi)->cand_stmt);
2812 if (c->sibling)
2813 record_increments (lookup_cand (c->sibling));
2815 if (c->dependent)
2816 record_increments (lookup_cand (c->dependent));
2819 /* Recursive helper function for phi_incr_cost. */
2821 static int
2822 phi_incr_cost_1 (slsr_cand_t c, const widest_int &incr, gimple *phi,
2823 int *savings)
2825 unsigned i;
2826 int cost = 0;
2827 slsr_cand_t basis = lookup_cand (c->basis);
2828 slsr_cand_t phi_cand = *stmt_cand_map->get (phi);
2830 if (phi_cand->visited)
2831 return 0;
2832 phi_cand->visited = 1;
2834 for (i = 0; i < gimple_phi_num_args (phi); i++)
2836 tree arg = gimple_phi_arg_def (phi, i);
2838 if (!operand_equal_p (arg, phi_cand->base_expr, 0))
2840 gimple *arg_def = SSA_NAME_DEF_STMT (arg);
2842 if (gimple_code (arg_def) == GIMPLE_PHI)
2844 int feeding_savings = 0;
2845 tree feeding_var = gimple_phi_result (arg_def);
2846 cost += phi_incr_cost_1 (c, incr, arg_def, &feeding_savings);
2847 if (uses_consumed_by_stmt (feeding_var, phi))
2848 *savings += feeding_savings;
2850 else
2852 slsr_cand_t arg_cand = base_cand_from_table (arg);
2853 widest_int diff = arg_cand->index - basis->index;
2855 if (incr == diff)
2857 tree basis_lhs = gimple_assign_lhs (basis->cand_stmt);
2858 tree lhs = gimple_assign_lhs (arg_cand->cand_stmt);
2859 cost += add_cost (true, TYPE_MODE (TREE_TYPE (basis_lhs)));
2860 if (uses_consumed_by_stmt (lhs, phi))
2861 *savings += stmt_cost (arg_cand->cand_stmt, true);
2867 return cost;
2870 /* Add up and return the costs of introducing add statements that
2871 require the increment INCR on behalf of candidate C and phi
2872 statement PHI. Accumulate into *SAVINGS the potential savings
2873 from removing existing statements that feed PHI and have no other
2874 uses. */
2876 static int
2877 phi_incr_cost (slsr_cand_t c, const widest_int &incr, gimple *phi,
2878 int *savings)
2880 int retval = phi_incr_cost_1 (c, incr, phi, savings);
2881 clear_visited (as_a <gphi *> (phi));
2882 return retval;
2885 /* Return the first candidate in the tree rooted at C that has not
2886 already been replaced, favoring siblings over dependents. */
2888 static slsr_cand_t
2889 unreplaced_cand_in_tree (slsr_cand_t c)
2891 if (!cand_already_replaced (c))
2892 return c;
2894 if (c->sibling)
2896 slsr_cand_t sib = unreplaced_cand_in_tree (lookup_cand (c->sibling));
2897 if (sib)
2898 return sib;
2901 if (c->dependent)
2903 slsr_cand_t dep = unreplaced_cand_in_tree (lookup_cand (c->dependent));
2904 if (dep)
2905 return dep;
2908 return NULL;
2911 /* Return TRUE if the candidates in the tree rooted at C should be
2912 optimized for speed, else FALSE. We estimate this based on the block
2913 containing the most dominant candidate in the tree that has not yet
2914 been replaced. */
2916 static bool
2917 optimize_cands_for_speed_p (slsr_cand_t c)
2919 slsr_cand_t c2 = unreplaced_cand_in_tree (c);
2920 gcc_assert (c2);
2921 return optimize_bb_for_speed_p (gimple_bb (c2->cand_stmt));
2924 /* Add COST_IN to the lowest cost of any dependent path starting at
2925 candidate C or any of its siblings, counting only candidates along
2926 such paths with increment INCR. Assume that replacing a candidate
2927 reduces cost by REPL_SAVINGS. Also account for savings from any
2928 statements that would go dead. If COUNT_PHIS is true, include
2929 costs of introducing feeding statements for conditional candidates. */
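/* For example (numbers invented): with COST_IN 6, REPL_SAVINGS 4, and
   a candidate plus one dependent that both use INCR and each have
   dead_savings of 1, the lowest path cost is 6 - (4 + 1) - (4 + 1)
   = -4, which is at or below COST_NEUTRAL and therefore profitable. */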
2931 static int
2932 lowest_cost_path (int cost_in, int repl_savings, slsr_cand_t c,
2933 const widest_int &incr, bool count_phis)
2935 int local_cost, sib_cost, savings = 0;
2936 widest_int cand_incr = cand_abs_increment (c);
2938 if (cand_already_replaced (c))
2939 local_cost = cost_in;
2940 else if (incr == cand_incr)
2941 local_cost = cost_in - repl_savings - c->dead_savings;
2942 else
2943 local_cost = cost_in - c->dead_savings;
2945 if (count_phis
2946 && phi_dependent_cand_p (c)
2947 && !cand_already_replaced (c))
2949 gimple *phi = lookup_cand (c->def_phi)->cand_stmt;
2950 local_cost += phi_incr_cost (c, incr, phi, &savings);
2952 if (uses_consumed_by_stmt (gimple_phi_result (phi), c->cand_stmt))
2953 local_cost -= savings;
2956 if (c->dependent)
2957 local_cost = lowest_cost_path (local_cost, repl_savings,
2958 lookup_cand (c->dependent), incr,
2959 count_phis);
2961 if (c->sibling)
2963 sib_cost = lowest_cost_path (cost_in, repl_savings,
2964 lookup_cand (c->sibling), incr,
2965 count_phis);
2966 local_cost = MIN (local_cost, sib_cost);
2969 return local_cost;
2972 /* Compute the total savings that would accrue from all replacements
2973 in the candidate tree rooted at C, counting only candidates with
2974 increment INCR. Assume that replacing a candidate reduces cost
2975 by REPL_SAVINGS. Also account for savings from statements that
2976 would go dead. */
2978 static int
2979 total_savings (int repl_savings, slsr_cand_t c, const widest_int &incr,
2980 bool count_phis)
2982 int savings = 0;
2983 widest_int cand_incr = cand_abs_increment (c);
2985 if (incr == cand_incr && !cand_already_replaced (c))
2986 savings += repl_savings + c->dead_savings;
2988 if (count_phis
2989 && phi_dependent_cand_p (c)
2990 && !cand_already_replaced (c))
2992 int phi_savings = 0;
2993 gimple *phi = lookup_cand (c->def_phi)->cand_stmt;
2994 savings -= phi_incr_cost (c, incr, phi, &phi_savings);
2996 if (uses_consumed_by_stmt (gimple_phi_result (phi), c->cand_stmt))
2997 savings += phi_savings;
3000 if (c->dependent)
3001 savings += total_savings (repl_savings, lookup_cand (c->dependent), incr,
3002 count_phis);
3004 if (c->sibling)
3005 savings += total_savings (repl_savings, lookup_cand (c->sibling), incr,
3006 count_phis);
3008 return savings;
3011 /* Use target-specific costs to determine and record which increments
3012 in the current candidate tree are profitable to replace, assuming
3013 MODE and SPEED. FIRST_DEP is the first dependent of the root of
3014 the candidate tree.
3016 One slight limitation here is that we don't account for the possible
3017 introduction of casts in some cases. See replace_one_candidate for
3018 the cases where these are introduced. This should probably be cleaned
3019 up sometime. */
3021 static void
3022 analyze_increments (slsr_cand_t first_dep, machine_mode mode, bool speed)
3024 unsigned i;
3026 for (i = 0; i < incr_vec_len; i++)
3028 HOST_WIDE_INT incr = incr_vec[i].incr.to_shwi ();
3030 /* If somehow this increment is bigger than a HWI, we won't
3031 be optimizing candidates that use it. And if the increment
3032 has a count of zero, nothing will be done with it. */
3033 if (!wi::fits_shwi_p (incr_vec[i].incr) || !incr_vec[i].count)
3034 incr_vec[i].cost = COST_INFINITE;
3036 /* Increments of 0, 1, and -1 are always profitable to replace,
3037 because they always replace a multiply or add with an add or
3038 copy, and may cause one or more existing instructions to go
3039 dead. Exception: -1 can't be assumed to be profitable for
3040 pointer addition. */
3041 else if (incr == 0
3042 || incr == 1
3043 || (incr == -1
3044 && !POINTER_TYPE_P (first_dep->cand_type)))
3045 incr_vec[i].cost = COST_NEUTRAL;
3047 /* If we need to add an initializer, give up if a cast from the
3048 candidate's type to its stride's type can lose precision.
3049 Note that this already takes into account that the stride may
3050 have been cast to a wider type, in which case this test won't
3051 fire. Example:
3053 short int _1;
3054 _2 = (int) _1;
3055 _3 = _2 * 10;
3056 _4 = x + _3; ADD: x + (10 * (int)_1) : int
3057 _5 = _2 * 15;
3058 _6 = x + _5; ADD: x + (15 * (int)_1) : int
3060 Although the stride was a short int initially, the stride
3061 used in the analysis has been widened to an int, and such
3062 widening will be done in the initializer as well. */
3063 else if (!incr_vec[i].initializer
3064 && TREE_CODE (first_dep->stride) != INTEGER_CST
3065 && !legal_cast_p_1 (first_dep->stride_type,
3066 TREE_TYPE (gimple_assign_lhs
3067 (first_dep->cand_stmt))))
3068 incr_vec[i].cost = COST_INFINITE;
3070 /* If we need to add an initializer, make sure we don't introduce
3071 a multiply by a pointer type, which can happen in certain cast
3072 scenarios. */
3073 else if (!incr_vec[i].initializer
3074 && TREE_CODE (first_dep->stride) != INTEGER_CST
3075 && POINTER_TYPE_P (first_dep->stride_type))
3076 incr_vec[i].cost = COST_INFINITE;
3078 /* For any other increment, if this is a multiply candidate, we
3079 must introduce a temporary T and initialize it with
3080 T_0 = stride * increment. When optimizing for speed, walk the
3081 candidate tree to calculate the best cost reduction along any
3082 path; if it offsets the fixed cost of inserting the initializer,
3083 replacing the increment is profitable. When optimizing for
3084 size, instead calculate the total cost reduction from replacing
3085 all candidates with this increment. */
3086 else if (first_dep->kind == CAND_MULT)
3088 int cost = mult_by_coeff_cost (incr, mode, speed);
3089 int repl_savings;
3091 if (tree_fits_shwi_p (first_dep->stride))
3093 HOST_WIDE_INT hwi_stride = tree_to_shwi (first_dep->stride);
3094 repl_savings = mult_by_coeff_cost (hwi_stride, mode, speed);
3096 else
3097 repl_savings = mul_cost (speed, mode);
3098 repl_savings -= add_cost (speed, mode);
3100 if (speed)
3101 cost = lowest_cost_path (cost, repl_savings, first_dep,
3102 incr_vec[i].incr, COUNT_PHIS);
3103 else
3104 cost -= total_savings (repl_savings, first_dep, incr_vec[i].incr,
3105 COUNT_PHIS);
3107 incr_vec[i].cost = cost;
3110 /* If this is an add candidate, the initializer may already
3111 exist, so only calculate the cost of the initializer if it
3112 doesn't. We are replacing one add with another here, so the
3113 known replacement savings is zero. We will account for removal
3114 of dead instructions in lowest_cost_path or total_savings. */
3115 else
3117 int cost = 0;
3118 if (!incr_vec[i].initializer)
3119 cost = mult_by_coeff_cost (incr, mode, speed);
3121 if (speed)
3122 cost = lowest_cost_path (cost, 0, first_dep, incr_vec[i].incr,
3123 DONT_COUNT_PHIS);
3124 else
3125 cost -= total_savings (0, first_dep, incr_vec[i].incr,
3126 DONT_COUNT_PHIS);
3128 incr_vec[i].cost = cost;
3133 /* Return the nearest common dominator of BB1 and BB2. If the blocks
3134 are identical, return the earlier of C1 and C2 in *WHERE. Otherwise,
3135 if the NCD matches BB1, return C1 in *WHERE; if the NCD matches BB2,
3136 return C2 in *WHERE; and if the NCD matches neither, return NULL in
3137 *WHERE. Note: It is possible for one of C1 and C2 to be NULL. */
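/* For instance (block numbers invented): if C1 lives in bb4 and C2 in
   bb6, and their nearest common dominator is bb2, NULL is returned in
   *WHERE since neither candidate's block is the dominator; if C2 were
   instead in bb2, the result would be bb2 with C2 in *WHERE. */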
3139 static basic_block
3140 ncd_for_two_cands (basic_block bb1, basic_block bb2,
3141 slsr_cand_t c1, slsr_cand_t c2, slsr_cand_t *where)
3143 basic_block ncd;
3145 if (!bb1)
3147 *where = c2;
3148 return bb2;
3151 if (!bb2)
3153 *where = c1;
3154 return bb1;
3157 ncd = nearest_common_dominator (CDI_DOMINATORS, bb1, bb2);
3159 /* If both candidates are in the same block, the earlier
3160 candidate wins. */
3161 if (bb1 == ncd && bb2 == ncd)
3163 if (!c1 || (c2 && c2->cand_num < c1->cand_num))
3164 *where = c2;
3165 else
3166 *where = c1;
3169 /* Otherwise, if one of them produced a candidate in the
3170 dominator, that one wins. */
3171 else if (bb1 == ncd)
3172 *where = c1;
3174 else if (bb2 == ncd)
3175 *where = c2;
3177 /* If neither matches the dominator, neither wins. */
3178 else
3179 *where = NULL;
3181 return ncd;
3184 /* Consider all candidates that feed PHI. Find the nearest common
3185 dominator of those candidates requiring the given increment INCR.
3186 Further find and return the nearest common dominator of this result
3187 with block NCD. If the returned block contains one or more of the
3188 candidates, return the earliest candidate in the block in *WHERE. */
3190 static basic_block
3191 ncd_with_phi (slsr_cand_t c, const widest_int &incr, gphi *phi,
3192 basic_block ncd, slsr_cand_t *where)
3194 unsigned i;
3195 slsr_cand_t basis = lookup_cand (c->basis);
3196 slsr_cand_t phi_cand = *stmt_cand_map->get (phi);
3198 for (i = 0; i < gimple_phi_num_args (phi); i++)
3200 tree arg = gimple_phi_arg_def (phi, i);
3202 if (!operand_equal_p (arg, phi_cand->base_expr, 0))
3204 gimple *arg_def = SSA_NAME_DEF_STMT (arg);
3206 if (gimple_code (arg_def) == GIMPLE_PHI)
3207 ncd = ncd_with_phi (c, incr, as_a <gphi *> (arg_def), ncd,
3208 where);
3209 else
3211 slsr_cand_t arg_cand = base_cand_from_table (arg);
3212 widest_int diff = arg_cand->index - basis->index;
3213 basic_block pred = gimple_phi_arg_edge (phi, i)->src;
3215 if ((incr == diff) || (!address_arithmetic_p && incr == -diff))
3216 ncd = ncd_for_two_cands (ncd, pred, *where, NULL, where);
3221 return ncd;
3224 /* Consider the candidate C together with any candidates that feed
3225 C's phi dependence (if any). Find and return the nearest common
3226 dominator of those candidates requiring the given increment INCR.
3227 If the returned block contains one or more of the candidates,
3228 return the earliest candidate in the block in *WHERE. */
3230 static basic_block
3231 ncd_of_cand_and_phis (slsr_cand_t c, const widest_int &incr, slsr_cand_t *where)
3233 basic_block ncd = NULL;
3235 if (cand_abs_increment (c) == incr)
3237 ncd = gimple_bb (c->cand_stmt);
3238 *where = c;
3241 if (phi_dependent_cand_p (c))
3242 ncd = ncd_with_phi (c, incr,
3243 as_a <gphi *> (lookup_cand (c->def_phi)->cand_stmt),
3244 ncd, where);
3246 return ncd;
3249 /* Consider all candidates in the tree rooted at C for which INCR
3250 represents the required increment of C relative to its basis.
3251 Find and return the basic block that most nearly dominates all
3252 such candidates. If the returned block contains one or more of
3253 the candidates, return the earliest candidate in the block in
3254 *WHERE. */
3256 static basic_block
3257 nearest_common_dominator_for_cands (slsr_cand_t c, const widest_int &incr,
3258 slsr_cand_t *where)
3260 basic_block sib_ncd = NULL, dep_ncd = NULL, this_ncd = NULL, ncd;
3261 slsr_cand_t sib_where = NULL, dep_where = NULL, this_where = NULL, new_where;
3263 /* First find the NCD of all siblings and dependents. */
3264 if (c->sibling)
3265 sib_ncd = nearest_common_dominator_for_cands (lookup_cand (c->sibling),
3266 incr, &sib_where);
3267 if (c->dependent)
3268 dep_ncd = nearest_common_dominator_for_cands (lookup_cand (c->dependent),
3269 incr, &dep_where);
3270 if (!sib_ncd && !dep_ncd)
3272 new_where = NULL;
3273 ncd = NULL;
3275 else if (sib_ncd && !dep_ncd)
3277 new_where = sib_where;
3278 ncd = sib_ncd;
3280 else if (dep_ncd && !sib_ncd)
3282 new_where = dep_where;
3283 ncd = dep_ncd;
3285 else
3286 ncd = ncd_for_two_cands (sib_ncd, dep_ncd, sib_where,
3287 dep_where, &new_where);
3289 /* If the candidate's increment doesn't match the one we're interested
3290 in (nor do any increments for feeding defs of a phi-dependence),
3291 then the result depends only on siblings and dependents. */
3292 this_ncd = ncd_of_cand_and_phis (c, incr, &this_where);
3294 if (!this_ncd || cand_already_replaced (c))
3296 *where = new_where;
3297 return ncd;
3300 /* Otherwise, compare this candidate with the result from all siblings
3301 and dependents. */
3302 ncd = ncd_for_two_cands (ncd, this_ncd, new_where, this_where, where);
3304 return ncd;
3307 /* Return TRUE if the increment indexed by INDEX is profitable to replace. */
3309 static inline bool
3310 profitable_increment_p (unsigned index)
3312 return (incr_vec[index].cost <= COST_NEUTRAL);
3315 /* For each profitable increment in the increment vector not equal to
3316 0 or 1 (or -1, for non-pointer arithmetic), find the nearest common
3317 dominator of all statements in the candidate chain rooted at C
3318 that require that increment, and insert an initializer
3319 T_0 = stride * increment at that location. Record T_0 with the
3320 increment record. */
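/* Sketch of an inserted initializer (names invented): for a profitable
   increment of 5 and a stride s_2 whose type matches the recorded
   stride type, the statement

     slsr_7 = s_2 * 5;

   is inserted at the nearest common dominator of the statements using
   that increment, preceded by a cast of s_2 when the recorded stride
   type differs from the nominal stride's type. */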
3322 static void
3323 insert_initializers (slsr_cand_t c)
3325 unsigned i;
3327 for (i = 0; i < incr_vec_len; i++)
3329 basic_block bb;
3330 slsr_cand_t where = NULL;
3331 gassign *init_stmt;
3332 gassign *cast_stmt = NULL;
3333 tree new_name, incr_tree, init_stride;
3334 widest_int incr = incr_vec[i].incr;
3336 if (!profitable_increment_p (i)
3337 || incr == 1
3338 || (incr == -1
3339 && (!POINTER_TYPE_P (lookup_cand (c->basis)->cand_type)))
3340 || incr == 0)
3341 continue;
3343 /* We may have already identified an existing initializer that
3344 will suffice. */
3345 if (incr_vec[i].initializer)
3347 if (dump_file && (dump_flags & TDF_DETAILS))
3349 fputs ("Using existing initializer: ", dump_file);
3350 print_gimple_stmt (dump_file,
3351 SSA_NAME_DEF_STMT (incr_vec[i].initializer),
3352 0, 0);
3354 continue;
3357 /* Find the block that most closely dominates all candidates
3358 with this increment. If there is at least one candidate in
3359 that block, the earliest one will be returned in WHERE. */
3360 bb = nearest_common_dominator_for_cands (c, incr, &where);
3362 /* If the NCD is not dominated by the block containing the
3363 definition of the stride, we can't legally insert a
3364 single initializer. Mark the increment as unprofitable
3365 so we don't make any replacements. FIXME: Multiple
3366 initializers could be placed with more analysis. */
3367 gimple *stride_def = SSA_NAME_DEF_STMT (c->stride);
3368 basic_block stride_bb = gimple_bb (stride_def);
3370 if (stride_bb && !dominated_by_p (CDI_DOMINATORS, bb, stride_bb))
3372 if (dump_file && (dump_flags & TDF_DETAILS))
3373 fprintf (dump_file,
3374 "Initializer #%d cannot be legally placed\n", i);
3375 incr_vec[i].cost = COST_INFINITE;
3376 continue;
3379 /* If the nominal stride has a different type than the recorded
3380 stride type, build a cast from the nominal stride to that type. */
3381 if (!types_compatible_p (TREE_TYPE (c->stride), c->stride_type))
3383 init_stride = make_temp_ssa_name (c->stride_type, NULL, "slsr");
3384 cast_stmt = gimple_build_assign (init_stride, NOP_EXPR, c->stride);
3386 else
3387 init_stride = c->stride;
3389 /* Create a new SSA name to hold the initializer's value. */
3390 new_name = make_temp_ssa_name (c->stride_type, NULL, "slsr");
3391 incr_vec[i].initializer = new_name;
3393 /* Create the initializer and insert it in the latest possible
3394 dominating position. */
3395 incr_tree = wide_int_to_tree (c->stride_type, incr);
3396 init_stmt = gimple_build_assign (new_name, MULT_EXPR,
3397 init_stride, incr_tree);
3398 if (where)
3400 gimple_stmt_iterator gsi = gsi_for_stmt (where->cand_stmt);
3401 location_t loc = gimple_location (where->cand_stmt);
3403 if (cast_stmt)
3405 gsi_insert_before (&gsi, cast_stmt, GSI_SAME_STMT);
3406 gimple_set_location (cast_stmt, loc);
3409 gsi_insert_before (&gsi, init_stmt, GSI_SAME_STMT);
3410 gimple_set_location (init_stmt, loc);
3412 else
3414 gimple_stmt_iterator gsi = gsi_last_bb (bb);
3415 gimple *basis_stmt = lookup_cand (c->basis)->cand_stmt;
3416 location_t loc = gimple_location (basis_stmt);
3418 if (!gsi_end_p (gsi) && stmt_ends_bb_p (gsi_stmt (gsi)))
3420 if (cast_stmt)
3422 gsi_insert_before (&gsi, cast_stmt, GSI_SAME_STMT);
3423 gimple_set_location (cast_stmt, loc);
3425 gsi_insert_before (&gsi, init_stmt, GSI_SAME_STMT);
3427 else
3429 if (cast_stmt)
3431 gsi_insert_after (&gsi, cast_stmt, GSI_NEW_STMT);
3432 gimple_set_location (cast_stmt, loc);
3434 gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
3437 gimple_set_location (init_stmt, gimple_location (basis_stmt));
3440 if (dump_file && (dump_flags & TDF_DETAILS))
3442 if (cast_stmt)
3444 fputs ("Inserting stride cast: ", dump_file);
3445 print_gimple_stmt (dump_file, cast_stmt, 0);
3447 fputs ("Inserting initializer: ", dump_file);
3448 print_gimple_stmt (dump_file, init_stmt, 0);
3453 /* Recursive helper function for all_phi_incrs_profitable. */
3455 static bool
3456 all_phi_incrs_profitable_1 (slsr_cand_t c, gphi *phi, int *spread)
3458 unsigned i;
3459 slsr_cand_t basis = lookup_cand (c->basis);
3460 slsr_cand_t phi_cand = *stmt_cand_map->get (phi);
3462 if (phi_cand->visited)
3463 return true;
3465 phi_cand->visited = 1;
3466 (*spread)++;
3468 /* If the basis doesn't dominate the PHI (including when the PHI is
3469 in the same block as the basis), we won't be able to create a PHI
3470 using the basis here. */
3471 basic_block basis_bb = gimple_bb (basis->cand_stmt);
3472 basic_block phi_bb = gimple_bb (phi);
3474 if (phi_bb == basis_bb
3475 || !dominated_by_p (CDI_DOMINATORS, phi_bb, basis_bb))
3476 return false;
3478 for (i = 0; i < gimple_phi_num_args (phi); i++)
3480 /* If the PHI arg resides in a block not dominated by the basis,
3481 we won't be able to create a PHI using the basis here. */
3482 basic_block pred_bb = gimple_phi_arg_edge (phi, i)->src;
3484 if (!dominated_by_p (CDI_DOMINATORS, pred_bb, basis_bb))
3485 return false;
3487 tree arg = gimple_phi_arg_def (phi, i);
3489 if (!operand_equal_p (arg, phi_cand->base_expr, 0))
3491 gimple *arg_def = SSA_NAME_DEF_STMT (arg);
3493 if (gimple_code (arg_def) == GIMPLE_PHI)
3495 if (!all_phi_incrs_profitable_1 (c, as_a <gphi *> (arg_def),
3496 spread)
3497 || *spread > MAX_SPREAD)
3498 return false;
3500 else
3502 int j;
3503 slsr_cand_t arg_cand = base_cand_from_table (arg);
3504 widest_int increment = arg_cand->index - basis->index;
3506 if (!address_arithmetic_p && wi::neg_p (increment))
3507 increment = -increment;
3509 j = incr_vec_index (increment);
3511 if (dump_file && (dump_flags & TDF_DETAILS))
3513 fprintf (dump_file, " Conditional candidate %d, phi: ",
3514 c->cand_num);
3515 print_gimple_stmt (dump_file, phi, 0);
3516 fputs (" increment: ", dump_file);
3517 print_decs (increment, dump_file);
3518 if (j < 0)
3519 fprintf (dump_file,
3520 "\n Not replaced; incr_vec overflow.\n");
3521 else {
3522 fprintf (dump_file, "\n cost: %d\n", incr_vec[j].cost);
3523 if (profitable_increment_p (j))
3524 fputs (" Replacing...\n", dump_file);
3525 else
3526 fputs (" Not replaced.\n", dump_file);
3530 if (j < 0 || !profitable_increment_p (j))
3531 return false;
3536 return true;
3539 /* Return TRUE iff all required increments for candidates feeding PHI
3540 are profitable (and legal!) to replace on behalf of candidate C. */
3542 static bool
3543 all_phi_incrs_profitable (slsr_cand_t c, gphi *phi)
3545 int spread = 0;
3546 bool retval = all_phi_incrs_profitable_1 (c, phi, &spread);
3547 clear_visited (phi);
3548 return retval;
3551 /* Create a NOP_EXPR that copies FROM_EXPR into a new SSA name of
3552 type TO_TYPE, and insert it in front of the statement represented
3553 by candidate C. Return
3554 the new SSA name. */
3556 static tree
3557 introduce_cast_before_cand (slsr_cand_t c, tree to_type, tree from_expr)
3559 tree cast_lhs;
3560 gassign *cast_stmt;
3561 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
3563 cast_lhs = make_temp_ssa_name (to_type, NULL, "slsr");
3564 cast_stmt = gimple_build_assign (cast_lhs, NOP_EXPR, from_expr);
3565 gimple_set_location (cast_stmt, gimple_location (c->cand_stmt));
3566 gsi_insert_before (&gsi, cast_stmt, GSI_SAME_STMT);
3568 if (dump_file && (dump_flags & TDF_DETAILS))
3570 fputs (" Inserting: ", dump_file);
3571 print_gimple_stmt (dump_file, cast_stmt, 0);
3574 return cast_lhs;
3577 /* Replace the RHS of the statement represented by candidate C with
3578 NEW_CODE, NEW_RHS1, and NEW_RHS2, provided that to do so doesn't
3579 leave C unchanged or just interchange its operands. The original
3580 operation and operands are in OLD_CODE, OLD_RHS1, and OLD_RHS2.
3581 If the replacement was made and we are doing a details dump,
3582 return the revised statement, else NULL. */
3584 static gimple *
3585 replace_rhs_if_not_dup (enum tree_code new_code, tree new_rhs1, tree new_rhs2,
3586 enum tree_code old_code, tree old_rhs1, tree old_rhs2,
3587 slsr_cand_t c)
3589 if (new_code != old_code
3590 || ((!operand_equal_p (new_rhs1, old_rhs1, 0)
3591 || !operand_equal_p (new_rhs2, old_rhs2, 0))
3592 && (!operand_equal_p (new_rhs1, old_rhs2, 0)
3593 || !operand_equal_p (new_rhs2, old_rhs1, 0))))
3595 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
3596 slsr_cand_t cc = c;
3597 gimple_assign_set_rhs_with_ops (&gsi, new_code, new_rhs1, new_rhs2);
3598 update_stmt (gsi_stmt (gsi));
3599 c->cand_stmt = gsi_stmt (gsi);
3600 while (cc->next_interp)
3602 cc = lookup_cand (cc->next_interp);
3603 cc->cand_stmt = gsi_stmt (gsi);
3606 if (dump_file && (dump_flags & TDF_DETAILS))
3607 return gsi_stmt (gsi);
3610 else if (dump_file && (dump_flags & TDF_DETAILS))
3611 fputs (" (duplicate, not actually replacing)\n", dump_file);
3613 return NULL;
3616 /* Strength-reduce the statement represented by candidate C by replacing
3617 it with an equivalent addition or subtraction. I is the index into
3618 the increment vector identifying C's increment. BASIS_NAME is the
3619 rhs1 to use in creating the add/subtract. */
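/* Example replacements (SSA names invented), with basis name y_5 and
   stride s_2:

     increment with initializer t_0:  x_6 = y_5 + t_0;
     increment  1:                    x_6 = y_5 + s_2;
     increment -1:                    x_6 = y_5 - s_2;
     increment  0:                    x_6 = y_5;  (or a cast of y_5)

   with casts introduced beforehand when the operand types differ. */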
3622 static void
3623 replace_one_candidate (slsr_cand_t c, unsigned i, tree basis_name)
3625 gimple *stmt_to_print = NULL;
3626 tree orig_rhs1, orig_rhs2;
3627 tree rhs2;
3628 enum tree_code orig_code, repl_code;
3629 widest_int cand_incr;
3631 orig_code = gimple_assign_rhs_code (c->cand_stmt);
3632 orig_rhs1 = gimple_assign_rhs1 (c->cand_stmt);
3633 orig_rhs2 = gimple_assign_rhs2 (c->cand_stmt);
3634 cand_incr = cand_increment (c);
3636 if (dump_file && (dump_flags & TDF_DETAILS))
3638 fputs ("Replacing: ", dump_file);
3639 print_gimple_stmt (dump_file, c->cand_stmt, 0);
3640 stmt_to_print = c->cand_stmt;
3643 if (address_arithmetic_p)
3644 repl_code = POINTER_PLUS_EXPR;
3645 else
3646 repl_code = PLUS_EXPR;
3648 /* If the increment has an initializer T_0, replace the candidate
3649 statement with an add of the basis name and the initializer. */
3650 if (incr_vec[i].initializer)
3652 tree init_type = TREE_TYPE (incr_vec[i].initializer);
3653 tree orig_type = TREE_TYPE (orig_rhs2);
3655 if (types_compatible_p (orig_type, init_type))
3656 rhs2 = incr_vec[i].initializer;
3657 else
3658 rhs2 = introduce_cast_before_cand (c, orig_type,
3659 incr_vec[i].initializer);
3661 if (incr_vec[i].incr != cand_incr)
3663 gcc_assert (repl_code == PLUS_EXPR);
3664 repl_code = MINUS_EXPR;
3667 stmt_to_print = replace_rhs_if_not_dup (repl_code, basis_name, rhs2,
3668 orig_code, orig_rhs1, orig_rhs2,
3672 /* Otherwise, the increment is one of -1, 0, and 1. Replace
3673 with a subtract of the stride from the basis name, a copy
3674 from the basis name, or an add of the stride to the basis
3675 name, respectively. It may be necessary to introduce a
3676 cast (or reuse an existing cast). */
3677 else if (cand_incr == 1)
3679 tree stride_type = TREE_TYPE (c->stride);
3680 tree orig_type = TREE_TYPE (orig_rhs2);
3682 if (types_compatible_p (orig_type, stride_type))
3683 rhs2 = c->stride;
3684 else
3685 rhs2 = introduce_cast_before_cand (c, orig_type, c->stride);
3687 stmt_to_print = replace_rhs_if_not_dup (repl_code, basis_name, rhs2,
3688 orig_code, orig_rhs1, orig_rhs2,
3692 else if (cand_incr == -1)
3694 tree stride_type = TREE_TYPE (c->stride);
3695 tree orig_type = TREE_TYPE (orig_rhs2);
3696 gcc_assert (repl_code != POINTER_PLUS_EXPR);
3698 if (types_compatible_p (orig_type, stride_type))
3699 rhs2 = c->stride;
3700 else
3701 rhs2 = introduce_cast_before_cand (c, orig_type, c->stride);
3703 if (orig_code != MINUS_EXPR
3704 || !operand_equal_p (basis_name, orig_rhs1, 0)
3705 || !operand_equal_p (rhs2, orig_rhs2, 0))
3707 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
3708 slsr_cand_t cc = c;
3709 gimple_assign_set_rhs_with_ops (&gsi, MINUS_EXPR, basis_name, rhs2);
3710 update_stmt (gsi_stmt (gsi));
3711 c->cand_stmt = gsi_stmt (gsi);
3712 while (cc->next_interp)
3714 cc = lookup_cand (cc->next_interp);
3715 cc->cand_stmt = gsi_stmt (gsi);
3718 if (dump_file && (dump_flags & TDF_DETAILS))
3719 stmt_to_print = gsi_stmt (gsi);
3721 else if (dump_file && (dump_flags & TDF_DETAILS))
3722 fputs (" (duplicate, not actually replacing)\n", dump_file);

  else if (cand_incr == 0)
    {
      tree lhs = gimple_assign_lhs (c->cand_stmt);
      tree lhs_type = TREE_TYPE (lhs);
      tree basis_type = TREE_TYPE (basis_name);

      if (types_compatible_p (lhs_type, basis_type))
	{
	  gassign *copy_stmt = gimple_build_assign (lhs, basis_name);
	  gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
	  slsr_cand_t cc = c;
	  gimple_set_location (copy_stmt, gimple_location (c->cand_stmt));
	  gsi_replace (&gsi, copy_stmt, false);
	  c->cand_stmt = copy_stmt;
	  while (cc->next_interp)
	    {
	      cc = lookup_cand (cc->next_interp);
	      cc->cand_stmt = copy_stmt;
	    }

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    stmt_to_print = copy_stmt;
	}
      else
	{
	  gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
	  gassign *cast_stmt = gimple_build_assign (lhs, NOP_EXPR, basis_name);
	  slsr_cand_t cc = c;
	  gimple_set_location (cast_stmt, gimple_location (c->cand_stmt));
	  gsi_replace (&gsi, cast_stmt, false);
	  c->cand_stmt = cast_stmt;
	  while (cc->next_interp)
	    {
	      cc = lookup_cand (cc->next_interp);
	      cc->cand_stmt = cast_stmt;
	    }

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    stmt_to_print = cast_stmt;
	}
    }
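
  /* Illustration of the cand_incr == 0 case just handled: the
     candidate computes the same value as its basis, so X = (B + i) * S
     with basis Y = (B + i) * S is replaced by the copy X = Y, or by a
     cast when the two types are not compatible.  */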

  else
    gcc_unreachable ();

  if (dump_file && (dump_flags & TDF_DETAILS) && stmt_to_print)
    {
      fputs ("With: ", dump_file);
      print_gimple_stmt (dump_file, stmt_to_print, 0);
      fputs ("\n", dump_file);
    }
}

/* For each candidate in the tree rooted at C, replace it with
   an increment if such has been shown to be profitable.  */

static void
replace_profitable_candidates (slsr_cand_t c)
{
  if (!cand_already_replaced (c))
    {
      widest_int increment = cand_abs_increment (c);
      enum tree_code orig_code = gimple_assign_rhs_code (c->cand_stmt);
      int i;

      i = incr_vec_index (increment);

      /* Only process profitable increments.  Nothing useful can be done
	 to a cast or copy.  */
      if (i >= 0
	  && profitable_increment_p (i)
	  && orig_code != SSA_NAME
	  && !CONVERT_EXPR_CODE_P (orig_code))
	{
	  if (phi_dependent_cand_p (c))
	    {
	      gphi *phi = as_a <gphi *> (lookup_cand (c->def_phi)->cand_stmt);

	      if (all_phi_incrs_profitable (c, phi))
		{
		  /* Look up the LHS SSA name from C's basis.  This will be
		     the RHS1 of the adds we will introduce to create new
		     phi arguments.  */
		  slsr_cand_t basis = lookup_cand (c->basis);
		  tree basis_name = gimple_assign_lhs (basis->cand_stmt);

		  /* Create a new phi statement that will represent C's true
		     basis after the transformation is complete.  */
		  location_t loc = gimple_location (c->cand_stmt);
		  tree name = create_phi_basis (c, phi, basis_name,
						loc, UNKNOWN_STRIDE);

		  /* Replace C with an add of the new basis phi and the
		     increment.  */
		  replace_one_candidate (c, i, name);
		}
	    }
	  else
	    {
	      slsr_cand_t basis = lookup_cand (c->basis);
	      tree basis_name = gimple_assign_lhs (basis->cand_stmt);
	      replace_one_candidate (c, i, basis_name);
	    }
	}
    }

  if (c->sibling)
    replace_profitable_candidates (lookup_cand (c->sibling));

  if (c->dependent)
    replace_profitable_candidates (lookup_cand (c->dependent));
}
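
/* Note: the sibling and dependent links above form the candidate tree.
   Siblings of C share C's basis, while dependents of C use C itself as
   their basis, so the two recursive calls visit every candidate in the
   tree rooted at the original call's argument.  */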

/* Analyze costs of related candidates in the candidate vector,
   and make beneficial replacements.  */

static void
analyze_candidates_and_replace (void)
{
  unsigned i;
  slsr_cand_t c;

  /* Each candidate that has a null basis and a non-null
     dependent is the root of a tree of related statements.
     Analyze each tree to determine a subset of those
     statements that can be replaced with maximum benefit.  */
  FOR_EACH_VEC_ELT (cand_vec, i, c)
    {
      slsr_cand_t first_dep;

      if (c->basis != 0 || c->dependent == 0)
	continue;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "\nProcessing dependency tree rooted at %d.\n",
		 c->cand_num);

      first_dep = lookup_cand (c->dependent);

      /* If this is a chain of CAND_REFs, unconditionally replace
	 each of them with a strength-reduced data reference.  */
      if (c->kind == CAND_REF)
	replace_refs (c);

      /* If the common stride of all related candidates is a known
	 constant, each candidate without a phi-dependence can be
	 profitably replaced.  Each replaces a multiply by a single
	 add, with the possibility that a feeding add also goes dead.
	 A candidate with a phi-dependence is replaced only if the
	 compensation code it requires is offset by the strength
	 reduction savings.  */
      else if (TREE_CODE (c->stride) == INTEGER_CST)
	replace_uncond_cands_and_profitable_phis (first_dep);

      /* When the stride is an SSA name, it may still be profitable
	 to replace some or all of the dependent candidates, depending
	 on whether the introduced increments can be reused, or are
	 less expensive to calculate than the replaced statements.  */
      else
	{
	  machine_mode mode;
	  bool speed;

	  /* Determine whether we'll be generating pointer arithmetic
	     when replacing candidates.  */
	  address_arithmetic_p = (c->kind == CAND_ADD
				  && POINTER_TYPE_P (c->cand_type));

	  /* If all candidates have already been replaced under other
	     interpretations, nothing remains to be done.  */
	  if (!count_candidates (c))
	    continue;

	  /* Construct an array of increments for this candidate chain.  */
	  incr_vec = XNEWVEC (incr_info, MAX_INCR_VEC_LEN);
	  incr_vec_len = 0;
	  record_increments (c);

	  /* Determine which increments are profitable to replace.  */
	  mode = TYPE_MODE (TREE_TYPE (gimple_assign_lhs (c->cand_stmt)));
	  speed = optimize_cands_for_speed_p (c);
	  analyze_increments (first_dep, mode, speed);

	  /* Insert initializers of the form T_0 = stride * increment
	     for use in profitable replacements.  */
	  insert_initializers (first_dep);
	  dump_incr_vec ();

	  /* Perform the replacements.  */
	  replace_profitable_candidates (first_dep);
	  free (incr_vec);
	}
    }

  /* For conditional candidates, we may have uncommitted insertions
     on edges to clean up.  */
  gsi_commit_edge_inserts ();
}
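
/* Illustrative (hypothetical) example of the overall transformation,
   in the notation of the header comment:

	a1 = (b + 2) * s;
	a2 = (b + 5) * s;

   With a1 serving as the basis of a2, a2 is replaced by a single add:

	a2 = a1 + 3 * s;

   where 3 * s folds to a constant when s is a known integer constant,
   and is otherwise materialized as an initializer T_0 = s * 3 when the
   cost analysis above judges that profitable.  */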

namespace {

const pass_data pass_data_strength_reduction =
{
  GIMPLE_PASS, /* type */
  "slsr", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_GIMPLE_SLSR, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_strength_reduction : public gimple_opt_pass
{
public:
  pass_strength_reduction (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_strength_reduction, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return flag_tree_slsr; }
  virtual unsigned int execute (function *);

}; // class pass_strength_reduction
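
/* Note: the pass is gated on -ftree-slsr, and since its dump name is
   "slsr", its decisions (including the "Replacing:"/"With:" messages
   above) can be inspected with -fdump-tree-slsr-details.  */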

unsigned
pass_strength_reduction::execute (function *fun)
{
  /* Create the obstack where candidates will reside.  */
  gcc_obstack_init (&cand_obstack);

  /* Allocate the candidate vector.  */
  cand_vec.create (128);

  /* Allocate the mapping from statements to candidate indices.  */
  stmt_cand_map = new hash_map<gimple *, slsr_cand_t>;

  /* Create the obstack where candidate chains will reside.  */
  gcc_obstack_init (&chain_obstack);

  /* Allocate the mapping from base expressions to candidate chains.  */
  base_cand_map = new hash_table<cand_chain_hasher> (500);

  /* Allocate the mapping from bases to alternative bases.  */
  alt_base_map = new hash_map<tree, tree>;

  /* Initialize the loop optimizer.  We need to detect flow across
     back edges, and this gives us dominator information as well.  */
  loop_optimizer_init (AVOID_CFG_MODIFICATIONS);

  /* Walk the CFG in predominator order looking for strength reduction
     candidates.  */
  find_candidates_dom_walker (CDI_DOMINATORS)
    .walk (fun->cfg->x_entry_block_ptr);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_cand_vec ();
      dump_cand_chains ();
    }

  delete alt_base_map;
  free_affine_expand_cache (&name_expansions);

  /* Analyze costs and make appropriate replacements.  */
  analyze_candidates_and_replace ();

  loop_optimizer_finalize ();
  delete base_cand_map;
  base_cand_map = NULL;
  obstack_free (&chain_obstack, NULL);
  delete stmt_cand_map;
  cand_vec.release ();
  obstack_free (&cand_obstack, NULL);

  return 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_strength_reduction (gcc::context *ctxt)
{
  return new pass_strength_reduction (ctxt);
}