1 /* Straight-line strength reduction.
2 Copyright (C) 2012-2015 Free Software Foundation, Inc.
3 Contributed by Bill Schmidt, IBM <wschmidt@linux.ibm.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 /* There are many algorithms for performing strength reduction on
22 loops. This is not one of them. IVOPTS handles strength reduction
23 of induction variables just fine. This pass is intended to pick
24 up the crumbs it leaves behind, by considering opportunities for
25 strength reduction along dominator paths.
27 Strength reduction addresses explicit multiplies, and certain
28 multiplies implicit in addressing expressions. It would also be
29 possible to apply strength reduction to divisions and modulos,
30 but such opportunities are relatively uncommon.
32 Strength reduction is also currently restricted to integer operations.
33 If desired, it could be extended to floating-point operations under
34 control of something like -funsafe-math-optimizations. */
36 #include "config.h"
37 #include "system.h"
38 #include "coretypes.h"
39 #include "alias.h"
40 #include "backend.h"
41 #include "cfghooks.h"
42 #include "tree.h"
43 #include "gimple.h"
44 #include "rtl.h"
45 #include "ssa.h"
46 #include "options.h"
47 #include "fold-const.h"
48 #include "internal-fn.h"
49 #include "gimple-iterator.h"
50 #include "gimplify-me.h"
51 #include "stor-layout.h"
52 #include "flags.h"
53 #include "insn-config.h"
54 #include "expmed.h"
55 #include "dojump.h"
56 #include "explow.h"
57 #include "calls.h"
58 #include "emit-rtl.h"
59 #include "varasm.h"
60 #include "stmt.h"
61 #include "expr.h"
62 #include "tree-pass.h"
63 #include "cfgloop.h"
64 #include "gimple-pretty-print.h"
65 #include "tree-cfg.h"
66 #include "domwalk.h"
67 #include "params.h"
68 #include "tree-ssa-address.h"
69 #include "tree-affine.h"
70 #include "wide-int-print.h"
71 #include "builtins.h"
73 /* Information about a strength reduction candidate. Each statement
74 in the candidate table represents an expression of one of the
75 following forms (the special case of CAND_REF will be described
76 later):
78 (CAND_MULT) S1: X = (B + i) * S
79 (CAND_ADD) S1: X = B + (i * S)
81 Here X and B are SSA names, i is an integer constant, and S is
82 either an SSA name or a constant. We call B the "base," i the
83 "index", and S the "stride."
85 Any statement S0 that dominates S1 and is of the form:
87 (CAND_MULT) S0: Y = (B + i') * S
88 (CAND_ADD) S0: Y = B + (i' * S)
90 is called a "basis" for S1. In both cases, S1 may be replaced by
92 S1': X = Y + (i - i') * S,
94 where (i - i') * S is folded to the extent possible.
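   As a small worked sketch (the function and variable names below are
   invented purely for illustration), consider:

     long f (long b)
     {
       long x = (b + 2) * 5;
       long y = (b + 7) * 5;
       return x + y;
     }

   The first statement is a basis (i' = 2) for the second (i = 7), so
   the second may be replaced by y = x + (7 - 2) * 5, which folds to
   y = x + 25, trading a multiply for an addition of a constant.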
96 All gimple statements are visited in dominator order, and each
97 statement that may contribute to one of the forms of S1 above is
98 given at least one entry in the candidate table. Such statements
99 include addition, pointer addition, subtraction, multiplication,
100 negation, copies, and nontrivial type casts. If a statement may
101 represent more than one expression of the forms of S1 above,
102 multiple "interpretations" are stored in the table and chained
103 together. Examples:
105 * An add of two SSA names may treat either operand as the base.
106 * A multiply of two SSA names, likewise.
107 * A copy or cast may be thought of as either a CAND_MULT with
108 i = 0 and S = 1, or as a CAND_ADD with i = 0 or S = 0.
110 Candidate records are allocated from an obstack. They are addressed
111 both from a hash table keyed on S1, and from a vector of candidate
112 pointers arranged in predominator order.
114 Opportunity note
115 ----------------
116 Currently we don't recognize:
118 S0: Y = (S * i') - B
119 S1: X = (S * i) - B
121 as a strength reduction opportunity, even though this S1 would
122 also be replaceable by the S1' above. This can be added if it
123 comes up in practice.
125 Strength reduction in addressing
126 --------------------------------
127 There is another kind of candidate known as CAND_REF. A CAND_REF
128 describes a statement containing a memory reference having
129 complex addressing that might benefit from strength reduction.
130 Specifically, we are interested in references for which
131 get_inner_reference returns a base address, offset, and bitpos as
132 follows:
134 base: MEM_REF (T1, C1)
135 offset: MULT_EXPR (PLUS_EXPR (T2, C2), C3)
136 bitpos: C4 * BITS_PER_UNIT
138 Here T1 and T2 are arbitrary trees, and C1, C2, C3, C4 are
139 arbitrary integer constants. Note that C2 may be zero, in which
140 case the offset will be MULT_EXPR (T2, C3).
142 When this pattern is recognized, the original memory reference
143 can be replaced with:
145 MEM_REF (POINTER_PLUS_EXPR (T1, MULT_EXPR (T2, C3)),
146 C1 + (C2 * C3) + C4)
148 which distributes the multiply to allow constant folding. When
149 two or more addressing expressions can be represented by MEM_REFs
150 of this form, differing only in the constants C1, C2, and C4,
151 making this substitution produces more efficient addressing during
152 the RTL phases. When there are not at least two expressions with
153 the same values of T1, T2, and C3, there is nothing to be gained
154 by the replacement.
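   As a worked instance with arbitrarily chosen constants: if C1 = 8,
   C2 = 1, C3 = 4, and C4 = 0, then

     MEM_REF (T1, 8)  with offset  MULT_EXPR (PLUS_EXPR (T2, 1), 4)

   is rewritten as

     MEM_REF (POINTER_PLUS_EXPR (T1, MULT_EXPR (T2, 4)), 12)

   since C1 + (C2 * C3) + C4 = 8 + 4 + 0 = 12.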
156 Strength reduction of CAND_REFs uses the same infrastructure as
157 that used by CAND_MULTs and CAND_ADDs. We record T1 in the base (B)
158 field, MULT_EXPR (T2, C3) in the stride (S) field, and
159 C1 + (C2 * C3) + C4 in the index (i) field. A basis for a CAND_REF
160 is thus another CAND_REF with the same B and S values. When at
161 least two CAND_REFs are chained together using the basis relation,
162 each of them is replaced as above, resulting in improved code
163 generation for addressing.
165 Conditional candidates
166 ======================
168 Conditional candidates are best illustrated with an example.
169 Consider the code sequence:
171 (1) x_0 = ...;
172 (2) a_0 = x_0 * 5; MULT (B: x_0; i: 0; S: 5)
173 if (...)
174 (3) x_1 = x_0 + 1; ADD (B: x_0, i: 1; S: 1)
175 (4) x_2 = PHI <x_0, x_1>; PHI (B: x_0, i: 0, S: 1)
176 (5) x_3 = x_2 + 1; ADD (B: x_2, i: 1, S: 1)
177 (6) a_1 = x_3 * 5; MULT (B: x_2, i: 1; S: 5)
179 Here strength reduction is complicated by the uncertain value of x_2.
180 A legitimate transformation is:
182 (1) x_0 = ...;
183 (2) a_0 = x_0 * 5;
184 if (...)
186 (3) [x_1 = x_0 + 1;]
187 (3a) t_1 = a_0 + 5;
189 (4) [x_2 = PHI <x_0, x_1>;]
190 (4a) t_2 = PHI <a_0, t_1>;
191 (5) [x_3 = x_2 + 1;]
192 (6r) a_1 = t_2 + 5;
194 where the bracketed instructions may go dead.
196 To recognize this opportunity, we have to observe that statement (6)
197 has a "hidden basis" (2). The hidden basis is unlike a normal basis
198 in that the statement and the hidden basis have different base SSA
199 names (x_2 and x_0, respectively). The relationship is established
200 when a statement's base name (x_2) is defined by a phi statement (4),
201 each argument of which (x_0, x_1) has an identical "derived base name."
202 If the argument is defined by a candidate (as x_1 is by (3)) that is a
203 CAND_ADD having a stride of 1, the derived base name of the argument is
204 the base name of the candidate (x_0). Otherwise, the argument itself
205 is its derived base name (as is the case with argument x_0).
207 The hidden basis for statement (6) is the nearest dominating candidate
208 whose base name is the derived base name (x_0) of the feeding phi (4),
209 and whose stride is identical to that of the statement. We can then
210 create the new "phi basis" (4a) and feeding adds along incoming arcs (3a),
211 allowing the final replacement of (6) by the strength-reduced (6r).
213 To facilitate this, a new kind of candidate (CAND_PHI) is introduced.
214 A CAND_PHI is not a candidate for replacement, but is maintained in the
215 candidate table to ease discovery of hidden bases. Any phi statement
216 whose arguments share a common derived base name is entered into the
217 table with the derived base name, an (arbitrary) index of zero, and a
218 stride of 1. A statement with a hidden basis can then be detected by
219 simply looking up its feeding phi definition in the candidate table,
220 extracting the derived base name, and searching for a basis in the
221 usual manner after substituting the derived base name.
223 Note that the transformation is only valid when the original phi and
224 the statements that define the phi's arguments are all at the same
225 position in the loop hierarchy. */
228 /* Index into the candidate vector, offset by 1. VECs are zero-based,
229 while cand_idx's are one-based, with zero indicating null. */
230 typedef unsigned cand_idx;
232 /* The kind of candidate. */
233 enum cand_kind
235 CAND_MULT,
236 CAND_ADD,
237 CAND_REF,
238 CAND_PHI
241 struct slsr_cand_d
243 /* The candidate statement S1. */
244 gimple *cand_stmt;
246 /* The base expression B: often an SSA name, but not always. */
247 tree base_expr;
249 /* The stride S. */
250 tree stride;
252 /* The index constant i. */
253 widest_int index;
255 /* The type of the candidate. This is normally the type of base_expr,
256 but casts may have occurred when combining feeding instructions.
257 A candidate can only be a basis for candidates of the same final type.
258 (For CAND_REFs, this is the type to be used for operand 1 of the
259 replacement MEM_REF.) */
260 tree cand_type;
262 /* The kind of candidate (CAND_MULT, etc.). */
263 enum cand_kind kind;
265 /* Index of this candidate in the candidate vector. */
266 cand_idx cand_num;
268 /* Index of the next candidate record for the same statement.
269 A statement may be useful in more than one way (e.g., due to
270 commutativity). So we can have multiple "interpretations"
271 of a statement. */
272 cand_idx next_interp;
274 /* Index of the basis statement S0, if any, in the candidate vector. */
275 cand_idx basis;
277 /* First candidate for which this candidate is a basis, if one exists. */
278 cand_idx dependent;
280 /* Next candidate having the same basis as this one. */
281 cand_idx sibling;
283 /* If this is a conditional candidate, the CAND_PHI candidate
284 that defines the base SSA name B. */
285 cand_idx def_phi;
287 /* Savings that can be expected from eliminating dead code if this
288 candidate is replaced. */
289 int dead_savings;
292 typedef struct slsr_cand_d slsr_cand, *slsr_cand_t;
293 typedef const struct slsr_cand_d *const_slsr_cand_t;
295 /* Pointers to candidates are chained together as part of a mapping
296 from base expressions to the candidates that use them. */
298 struct cand_chain_d
300 /* Base expression for the chain of candidates: often, but not
301 always, an SSA name. */
302 tree base_expr;
304 /* Pointer to a candidate. */
305 slsr_cand_t cand;
307 /* Chain pointer. */
308 struct cand_chain_d *next;
312 typedef struct cand_chain_d cand_chain, *cand_chain_t;
313 typedef const struct cand_chain_d *const_cand_chain_t;
315 /* Information about a unique "increment" associated with candidates
316 having an SSA name for a stride. An increment is the difference
317 between the index of the candidate and the index of its basis,
318 i.e., (i - i') as discussed in the module commentary.
320 When we are not going to generate address arithmetic we treat
321 increments that differ only in sign as the same, allowing sharing
322 of the cost of initializers. The absolute value of the increment
323 is stored in the incr_info. */
325 struct incr_info_d
327 /* The increment that relates a candidate to its basis. */
328 widest_int incr;
330 /* How many times the increment occurs in the candidate tree. */
331 unsigned count;
333 /* Cost of replacing candidates using this increment. Negative and
334 zero costs indicate replacement should be performed. */
335 int cost;
337 /* If this increment is profitable but is not -1, 0, or 1, it requires
338 an initializer T_0 = stride * incr to be found or introduced in the
339 nearest common dominator of all candidates. This field holds T_0
340 for subsequent use. */
341 tree initializer;
343 /* If the initializer was found to already exist, this is the block
344 where it was found. */
345 basic_block init_bb;
348 typedef struct incr_info_d incr_info, *incr_info_t;
350 /* Candidates are maintained in a vector. If candidate X dominates
351 candidate Y, then X appears before Y in the vector; but the
352 converse does not necessarily hold. */
353 static vec<slsr_cand_t> cand_vec;
355 enum cost_consts
357 COST_NEUTRAL = 0,
358 COST_INFINITE = 1000
361 enum stride_status
363 UNKNOWN_STRIDE = 0,
364 KNOWN_STRIDE = 1
367 enum phi_adjust_status
369 NOT_PHI_ADJUST = 0,
370 PHI_ADJUST = 1
373 enum count_phis_status
375 DONT_COUNT_PHIS = 0,
376 COUNT_PHIS = 1
379 /* Pointer map embodying a mapping from statements to candidates. */
380 static hash_map<gimple *, slsr_cand_t> *stmt_cand_map;
382 /* Obstack for candidates. */
383 static struct obstack cand_obstack;
385 /* Obstack for candidate chains. */
386 static struct obstack chain_obstack;
388 /* An array INCR_VEC of incr_infos is used during analysis of related
389 candidates having an SSA name for a stride. INCR_VEC_LEN describes
390 its current length. MAX_INCR_VEC_LEN is used to avoid costly
391 pathological cases. */
392 static incr_info_t incr_vec;
393 static unsigned incr_vec_len;
394 const int MAX_INCR_VEC_LEN = 16;
396 /* For a chain of candidates with unknown stride, indicates whether or not
397 we must generate pointer arithmetic when replacing statements. */
398 static bool address_arithmetic_p;
400 /* Forward function declarations. */
401 static slsr_cand_t base_cand_from_table (tree);
402 static tree introduce_cast_before_cand (slsr_cand_t, tree, tree);
403 static bool legal_cast_p_1 (tree, tree);
405 /* Produce a pointer to the IDX'th candidate in the candidate vector. */
407 static slsr_cand_t
408 lookup_cand (cand_idx idx)
410 return cand_vec[idx - 1];
413 /* Helper for hashing a candidate chain header. */
415 struct cand_chain_hasher : nofree_ptr_hash <cand_chain>
417 static inline hashval_t hash (const cand_chain *);
418 static inline bool equal (const cand_chain *, const cand_chain *);
421 inline hashval_t
422 cand_chain_hasher::hash (const cand_chain *p)
424 tree base_expr = p->base_expr;
425 return iterative_hash_expr (base_expr, 0);
428 inline bool
429 cand_chain_hasher::equal (const cand_chain *chain1, const cand_chain *chain2)
431 return operand_equal_p (chain1->base_expr, chain2->base_expr, 0);
434 /* Hash table embodying a mapping from base exprs to chains of candidates. */
435 static hash_table<cand_chain_hasher> *base_cand_map;
437 /* Pointer map used by tree_to_aff_combination_expand. */
438 static hash_map<tree, name_expansion *> *name_expansions;
439 /* Pointer map embodying a mapping from bases to alternative bases. */
440 static hash_map<tree, tree> *alt_base_map;
 442 /* Given BASE, use the tree affine combination facilities to
443 find the underlying tree expression for BASE, with any
444 immediate offset excluded.
446 N.B. we should eliminate this backtracking with better forward
447 analysis in a future release. */
449 static tree
450 get_alternative_base (tree base)
452 tree *result = alt_base_map->get (base);
454 if (result == NULL)
456 tree expr;
457 aff_tree aff;
459 tree_to_aff_combination_expand (base, TREE_TYPE (base),
460 &aff, &name_expansions);
461 aff.offset = 0;
462 expr = aff_combination_to_tree (&aff);
464 gcc_assert (!alt_base_map->put (base, base == expr ? NULL : expr));
466 return expr == base ? NULL : expr;
469 return *result;
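/* Illustration (hedged; the trees involved depend on what earlier
   passes produced): if BASE expands to the affine combination T + 80
   for some tree T, the alternative base is the tree rebuilt from T
   alone, so two CAND_REF bases differing only by such an immediate
   offset map to the same alternative base and can share a basis.  */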
472 /* Look in the candidate table for a CAND_PHI that defines BASE and
473 return it if found; otherwise return NULL. */
475 static cand_idx
476 find_phi_def (tree base)
478 slsr_cand_t c;
480 if (TREE_CODE (base) != SSA_NAME)
481 return 0;
483 c = base_cand_from_table (base);
485 if (!c || c->kind != CAND_PHI)
486 return 0;
488 return c->cand_num;
491 /* Helper routine for find_basis_for_candidate. May be called twice:
492 once for the candidate's base expr, and optionally again either for
493 the candidate's phi definition or for a CAND_REF's alternative base
494 expression. */
496 static slsr_cand_t
497 find_basis_for_base_expr (slsr_cand_t c, tree base_expr)
499 cand_chain mapping_key;
500 cand_chain_t chain;
501 slsr_cand_t basis = NULL;
503 // Limit potential of N^2 behavior for long candidate chains.
504 int iters = 0;
505 int max_iters = PARAM_VALUE (PARAM_MAX_SLSR_CANDIDATE_SCAN);
507 mapping_key.base_expr = base_expr;
508 chain = base_cand_map->find (&mapping_key);
510 for (; chain && iters < max_iters; chain = chain->next, ++iters)
512 slsr_cand_t one_basis = chain->cand;
514 if (one_basis->kind != c->kind
515 || one_basis->cand_stmt == c->cand_stmt
516 || !operand_equal_p (one_basis->stride, c->stride, 0)
517 || !types_compatible_p (one_basis->cand_type, c->cand_type)
518 || !dominated_by_p (CDI_DOMINATORS,
519 gimple_bb (c->cand_stmt),
520 gimple_bb (one_basis->cand_stmt)))
521 continue;
523 if (!basis || basis->cand_num < one_basis->cand_num)
524 basis = one_basis;
527 return basis;
530 /* Use the base expr from candidate C to look for possible candidates
531 that can serve as a basis for C. Each potential basis must also
532 appear in a block that dominates the candidate statement and have
533 the same stride and type. If more than one possible basis exists,
534 the one with highest index in the vector is chosen; this will be
535 the most immediately dominating basis. */
537 static int
538 find_basis_for_candidate (slsr_cand_t c)
540 slsr_cand_t basis = find_basis_for_base_expr (c, c->base_expr);
542 /* If a candidate doesn't have a basis using its base expression,
543 it may have a basis hidden by one or more intervening phis. */
544 if (!basis && c->def_phi)
546 basic_block basis_bb, phi_bb;
547 slsr_cand_t phi_cand = lookup_cand (c->def_phi);
548 basis = find_basis_for_base_expr (c, phi_cand->base_expr);
550 if (basis)
552 /* A hidden basis must dominate the phi-definition of the
553 candidate's base name. */
554 phi_bb = gimple_bb (phi_cand->cand_stmt);
555 basis_bb = gimple_bb (basis->cand_stmt);
557 if (phi_bb == basis_bb
558 || !dominated_by_p (CDI_DOMINATORS, phi_bb, basis_bb))
560 basis = NULL;
561 c->basis = 0;
564 /* If we found a hidden basis, estimate additional dead-code
565 savings if the phi and its feeding statements can be removed. */
566 if (basis && has_single_use (gimple_phi_result (phi_cand->cand_stmt)))
567 c->dead_savings += phi_cand->dead_savings;
571 if (flag_expensive_optimizations && !basis && c->kind == CAND_REF)
573 tree alt_base_expr = get_alternative_base (c->base_expr);
574 if (alt_base_expr)
575 basis = find_basis_for_base_expr (c, alt_base_expr);
578 if (basis)
580 c->sibling = basis->dependent;
581 basis->dependent = c->cand_num;
582 return basis->cand_num;
585 return 0;
588 /* Record a mapping from BASE to C, indicating that C may potentially serve
589 as a basis using that base expression. BASE may be the same as
 590    C->BASE_EXPR; alternatively BASE can be a different tree that shares the
 591    underlying expression of C->BASE_EXPR.  */
593 static void
594 record_potential_basis (slsr_cand_t c, tree base)
596 cand_chain_t node;
597 cand_chain **slot;
599 gcc_assert (base);
601 node = (cand_chain_t) obstack_alloc (&chain_obstack, sizeof (cand_chain));
602 node->base_expr = base;
603 node->cand = c;
604 node->next = NULL;
605 slot = base_cand_map->find_slot (node, INSERT);
607 if (*slot)
609 cand_chain_t head = (cand_chain_t) (*slot);
610 node->next = head->next;
611 head->next = node;
613 else
614 *slot = node;
617 /* Allocate storage for a new candidate and initialize its fields.
618 Attempt to find a basis for the candidate.
620 For CAND_REF, an alternative base may also be recorded and used
621 to find a basis. This helps cases where the expression hidden
622 behind BASE (which is usually an SSA_NAME) has immediate offset,
623 e.g.
625 a2[i][j] = 1;
626 a2[i + 20][j] = 2; */
628 static slsr_cand_t
629 alloc_cand_and_find_basis (enum cand_kind kind, gimple *gs, tree base,
630 const widest_int &index, tree stride, tree ctype,
631 unsigned savings)
633 slsr_cand_t c = (slsr_cand_t) obstack_alloc (&cand_obstack,
634 sizeof (slsr_cand));
635 c->cand_stmt = gs;
636 c->base_expr = base;
637 c->stride = stride;
638 c->index = index;
639 c->cand_type = ctype;
640 c->kind = kind;
641 c->cand_num = cand_vec.length () + 1;
642 c->next_interp = 0;
643 c->dependent = 0;
644 c->sibling = 0;
645 c->def_phi = kind == CAND_MULT ? find_phi_def (base) : 0;
646 c->dead_savings = savings;
648 cand_vec.safe_push (c);
650 if (kind == CAND_PHI)
651 c->basis = 0;
652 else
653 c->basis = find_basis_for_candidate (c);
655 record_potential_basis (c, base);
656 if (flag_expensive_optimizations && kind == CAND_REF)
658 tree alt_base = get_alternative_base (base);
659 if (alt_base)
660 record_potential_basis (c, alt_base);
663 return c;
666 /* Determine the target cost of statement GS when compiling according
667 to SPEED. */
669 static int
670 stmt_cost (gimple *gs, bool speed)
672 tree lhs, rhs1, rhs2;
673 machine_mode lhs_mode;
675 gcc_assert (is_gimple_assign (gs));
676 lhs = gimple_assign_lhs (gs);
677 rhs1 = gimple_assign_rhs1 (gs);
678 lhs_mode = TYPE_MODE (TREE_TYPE (lhs));
680 switch (gimple_assign_rhs_code (gs))
682 case MULT_EXPR:
683 rhs2 = gimple_assign_rhs2 (gs);
685 if (tree_fits_shwi_p (rhs2))
686 return mult_by_coeff_cost (tree_to_shwi (rhs2), lhs_mode, speed);
688 gcc_assert (TREE_CODE (rhs1) != INTEGER_CST);
689 return mul_cost (speed, lhs_mode);
691 case PLUS_EXPR:
692 case POINTER_PLUS_EXPR:
693 case MINUS_EXPR:
694 return add_cost (speed, lhs_mode);
696 case NEGATE_EXPR:
697 return neg_cost (speed, lhs_mode);
699 CASE_CONVERT:
700 return convert_cost (lhs_mode, TYPE_MODE (TREE_TYPE (rhs1)), speed);
702 /* Note that we don't assign costs to copies that in most cases
703 will go away. */
704 default:
708 gcc_unreachable ();
709 return 0;
712 /* Look up the defining statement for BASE_IN and return a pointer
713 to its candidate in the candidate table, if any; otherwise NULL.
714 Only CAND_ADD and CAND_MULT candidates are returned. */
716 static slsr_cand_t
717 base_cand_from_table (tree base_in)
719 slsr_cand_t *result;
721 gimple *def = SSA_NAME_DEF_STMT (base_in);
722 if (!def)
723 return (slsr_cand_t) NULL;
725 result = stmt_cand_map->get (def);
727 if (result && (*result)->kind != CAND_REF)
728 return *result;
730 return (slsr_cand_t) NULL;
733 /* Add an entry to the statement-to-candidate mapping. */
735 static void
736 add_cand_for_stmt (gimple *gs, slsr_cand_t c)
738 gcc_assert (!stmt_cand_map->put (gs, c));
741 /* Given PHI which contains a phi statement, determine whether it
742 satisfies all the requirements of a phi candidate. If so, create
743 a candidate. Note that a CAND_PHI never has a basis itself, but
744 is used to help find a basis for subsequent candidates. */
746 static void
747 slsr_process_phi (gphi *phi, bool speed)
749 unsigned i;
750 tree arg0_base = NULL_TREE, base_type;
751 slsr_cand_t c;
752 struct loop *cand_loop = gimple_bb (phi)->loop_father;
753 unsigned savings = 0;
755 /* A CAND_PHI requires each of its arguments to have the same
756 derived base name. (See the module header commentary for a
757 definition of derived base names.) Furthermore, all feeding
758 definitions must be in the same position in the loop hierarchy
759 as PHI. */
761 for (i = 0; i < gimple_phi_num_args (phi); i++)
763 slsr_cand_t arg_cand;
764 tree arg = gimple_phi_arg_def (phi, i);
765 tree derived_base_name = NULL_TREE;
766 gimple *arg_stmt = NULL;
767 basic_block arg_bb = NULL;
769 if (TREE_CODE (arg) != SSA_NAME)
770 return;
772 arg_cand = base_cand_from_table (arg);
774 if (arg_cand)
776 while (arg_cand->kind != CAND_ADD && arg_cand->kind != CAND_PHI)
778 if (!arg_cand->next_interp)
779 return;
781 arg_cand = lookup_cand (arg_cand->next_interp);
784 if (!integer_onep (arg_cand->stride))
785 return;
787 derived_base_name = arg_cand->base_expr;
788 arg_stmt = arg_cand->cand_stmt;
789 arg_bb = gimple_bb (arg_stmt);
791 /* Gather potential dead code savings if the phi statement
792 can be removed later on. */
793 if (has_single_use (arg))
795 if (gimple_code (arg_stmt) == GIMPLE_PHI)
796 savings += arg_cand->dead_savings;
797 else
798 savings += stmt_cost (arg_stmt, speed);
801 else
803 derived_base_name = arg;
805 if (SSA_NAME_IS_DEFAULT_DEF (arg))
806 arg_bb = single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun));
807 else
 808             arg_bb = gimple_bb (SSA_NAME_DEF_STMT (arg));
811 if (!arg_bb || arg_bb->loop_father != cand_loop)
812 return;
814 if (i == 0)
815 arg0_base = derived_base_name;
816 else if (!operand_equal_p (derived_base_name, arg0_base, 0))
817 return;
820 /* Create the candidate. "alloc_cand_and_find_basis" is named
821 misleadingly for this case, as no basis will be sought for a
822 CAND_PHI. */
823 base_type = TREE_TYPE (arg0_base);
825 c = alloc_cand_and_find_basis (CAND_PHI, phi, arg0_base,
826 0, integer_one_node, base_type, savings);
828 /* Add the candidate to the statement-candidate mapping. */
829 add_cand_for_stmt (phi, c);
832 /* Given PBASE which is a pointer to tree, look up the defining
833 statement for it and check whether the candidate is in the
834 form of:
836 X = B + (1 * S), S is integer constant
837 X = B + (i * S), S is integer one
 839    If so, set PBASE to the candidate's base_expr and return the
 840    widest_int (i * S).
 841    Otherwise, just return a zero widest_int.  */
843 static widest_int
844 backtrace_base_for_ref (tree *pbase)
846 tree base_in = *pbase;
847 slsr_cand_t base_cand;
849 STRIP_NOPS (base_in);
851 /* Strip off widening conversion(s) to handle cases where
852 e.g. 'B' is widened from an 'int' in order to calculate
853 a 64-bit address. */
854 if (CONVERT_EXPR_P (base_in)
855 && legal_cast_p_1 (base_in, TREE_OPERAND (base_in, 0)))
856 base_in = get_unwidened (base_in, NULL_TREE);
858 if (TREE_CODE (base_in) != SSA_NAME)
859 return 0;
861 base_cand = base_cand_from_table (base_in);
863 while (base_cand && base_cand->kind != CAND_PHI)
865 if (base_cand->kind == CAND_ADD
866 && base_cand->index == 1
867 && TREE_CODE (base_cand->stride) == INTEGER_CST)
869 /* X = B + (1 * S), S is integer constant. */
870 *pbase = base_cand->base_expr;
871 return wi::to_widest (base_cand->stride);
873 else if (base_cand->kind == CAND_ADD
874 && TREE_CODE (base_cand->stride) == INTEGER_CST
875 && integer_onep (base_cand->stride))
877 /* X = B + (i * S), S is integer one. */
878 *pbase = base_cand->base_expr;
879 return base_cand->index;
882 if (base_cand->next_interp)
883 base_cand = lookup_cand (base_cand->next_interp);
884 else
885 base_cand = NULL;
888 return 0;
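/* For example (SSA names invented for illustration): if *PBASE is t_3
   and t_3 is recorded as the CAND_ADD t_3 = t_1 + (1 * 8), i.e. index
   one and constant stride eight, then *PBASE is rewritten to t_1 and 8
   is returned; restructure_reference folds that value into the index
   as the C5 * C3 term.  */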
891 /* Look for the following pattern:
893 *PBASE: MEM_REF (T1, C1)
895 *POFFSET: MULT_EXPR (T2, C3) [C2 is zero]
897 MULT_EXPR (PLUS_EXPR (T2, C2), C3)
899 MULT_EXPR (MINUS_EXPR (T2, -C2), C3)
901 *PINDEX: C4 * BITS_PER_UNIT
903 If not present, leave the input values unchanged and return FALSE.
904 Otherwise, modify the input values as follows and return TRUE:
906 *PBASE: T1
907 *POFFSET: MULT_EXPR (T2, C3)
908 *PINDEX: C1 + (C2 * C3) + C4
910 When T2 is recorded by a CAND_ADD in the form of (T2' + C5), it
911 will be further restructured to:
913 *PBASE: T1
914 *POFFSET: MULT_EXPR (T2', C3)
915 *PINDEX: C1 + (C2 * C3) + C4 + (C5 * C3) */
917 static bool
918 restructure_reference (tree *pbase, tree *poffset, widest_int *pindex,
919 tree *ptype)
921 tree base = *pbase, offset = *poffset;
922 widest_int index = *pindex;
923 tree mult_op0, t1, t2, type;
924 widest_int c1, c2, c3, c4, c5;
926 if (!base
927 || !offset
928 || TREE_CODE (base) != MEM_REF
929 || TREE_CODE (offset) != MULT_EXPR
930 || TREE_CODE (TREE_OPERAND (offset, 1)) != INTEGER_CST
931 || wi::umod_floor (index, BITS_PER_UNIT) != 0)
932 return false;
934 t1 = TREE_OPERAND (base, 0);
935 c1 = widest_int::from (mem_ref_offset (base), SIGNED);
936 type = TREE_TYPE (TREE_OPERAND (base, 1));
938 mult_op0 = TREE_OPERAND (offset, 0);
939 c3 = wi::to_widest (TREE_OPERAND (offset, 1));
941 if (TREE_CODE (mult_op0) == PLUS_EXPR)
943 if (TREE_CODE (TREE_OPERAND (mult_op0, 1)) == INTEGER_CST)
945 t2 = TREE_OPERAND (mult_op0, 0);
946 c2 = wi::to_widest (TREE_OPERAND (mult_op0, 1));
948 else
949 return false;
951 else if (TREE_CODE (mult_op0) == MINUS_EXPR)
953 if (TREE_CODE (TREE_OPERAND (mult_op0, 1)) == INTEGER_CST)
955 t2 = TREE_OPERAND (mult_op0, 0);
956 c2 = -wi::to_widest (TREE_OPERAND (mult_op0, 1));
958 else
959 return false;
961 else
963 t2 = mult_op0;
964 c2 = 0;
967 c4 = wi::lrshift (index, LOG2_BITS_PER_UNIT);
968 c5 = backtrace_base_for_ref (&t2);
970 *pbase = t1;
971 *poffset = fold_build2 (MULT_EXPR, sizetype, fold_convert (sizetype, t2),
972 wide_int_to_tree (sizetype, c3));
973 *pindex = c1 + c2 * c3 + c4 + c5 * c3;
974 *ptype = type;
976 return true;
979 /* Given GS which contains a data reference, create a CAND_REF entry in
980 the candidate table and attempt to find a basis. */
982 static void
983 slsr_process_ref (gimple *gs)
985 tree ref_expr, base, offset, type;
986 HOST_WIDE_INT bitsize, bitpos;
987 machine_mode mode;
988 int unsignedp, volatilep;
989 slsr_cand_t c;
991 if (gimple_vdef (gs))
992 ref_expr = gimple_assign_lhs (gs);
993 else
994 ref_expr = gimple_assign_rhs1 (gs);
996 if (!handled_component_p (ref_expr)
997 || TREE_CODE (ref_expr) == BIT_FIELD_REF
998 || (TREE_CODE (ref_expr) == COMPONENT_REF
999 && DECL_BIT_FIELD (TREE_OPERAND (ref_expr, 1))))
1000 return;
1002 base = get_inner_reference (ref_expr, &bitsize, &bitpos, &offset, &mode,
1003 &unsignedp, &volatilep, false);
1004 widest_int index = bitpos;
1006 if (!restructure_reference (&base, &offset, &index, &type))
1007 return;
1009 c = alloc_cand_and_find_basis (CAND_REF, gs, base, index, offset,
1010 type, 0);
1012 /* Add the candidate to the statement-candidate mapping. */
1013 add_cand_for_stmt (gs, c);
1016 /* Create a candidate entry for a statement GS, where GS multiplies
1017 two SSA names BASE_IN and STRIDE_IN. Propagate any known information
1018 about the two SSA names into the new candidate. Return the new
1019 candidate. */
1021 static slsr_cand_t
1022 create_mul_ssa_cand (gimple *gs, tree base_in, tree stride_in, bool speed)
1024 tree base = NULL_TREE, stride = NULL_TREE, ctype = NULL_TREE;
1025 widest_int index;
1026 unsigned savings = 0;
1027 slsr_cand_t c;
1028 slsr_cand_t base_cand = base_cand_from_table (base_in);
1030 /* Look at all interpretations of the base candidate, if necessary,
1031 to find information to propagate into this candidate. */
1032 while (base_cand && !base && base_cand->kind != CAND_PHI)
1035 if (base_cand->kind == CAND_MULT && integer_onep (base_cand->stride))
1037 /* Y = (B + i') * 1
1038 X = Y * Z
1039 ================
1040 X = (B + i') * Z */
1041 base = base_cand->base_expr;
1042 index = base_cand->index;
1043 stride = stride_in;
1044 ctype = base_cand->cand_type;
1045 if (has_single_use (base_in))
1046 savings = (base_cand->dead_savings
1047 + stmt_cost (base_cand->cand_stmt, speed));
1049 else if (base_cand->kind == CAND_ADD
1050 && TREE_CODE (base_cand->stride) == INTEGER_CST)
1052 /* Y = B + (i' * S), S constant
1053 X = Y * Z
1054 ============================
1055 X = B + ((i' * S) * Z) */
1056 base = base_cand->base_expr;
1057 index = base_cand->index * wi::to_widest (base_cand->stride);
1058 stride = stride_in;
1059 ctype = base_cand->cand_type;
1060 if (has_single_use (base_in))
1061 savings = (base_cand->dead_savings
1062 + stmt_cost (base_cand->cand_stmt, speed));
1065 if (base_cand->next_interp)
1066 base_cand = lookup_cand (base_cand->next_interp);
1067 else
1068 base_cand = NULL;
1071 if (!base)
1073 /* No interpretations had anything useful to propagate, so
1074 produce X = (Y + 0) * Z. */
1075 base = base_in;
1076 index = 0;
1077 stride = stride_in;
1078 ctype = TREE_TYPE (base_in);
1081 c = alloc_cand_and_find_basis (CAND_MULT, gs, base, index, stride,
1082 ctype, savings);
1083 return c;
1086 /* Create a candidate entry for a statement GS, where GS multiplies
1087 SSA name BASE_IN by constant STRIDE_IN. Propagate any known
1088 information about BASE_IN into the new candidate. Return the new
1089 candidate. */
1091 static slsr_cand_t
1092 create_mul_imm_cand (gimple *gs, tree base_in, tree stride_in, bool speed)
1094 tree base = NULL_TREE, stride = NULL_TREE, ctype = NULL_TREE;
1095 widest_int index, temp;
1096 unsigned savings = 0;
1097 slsr_cand_t c;
1098 slsr_cand_t base_cand = base_cand_from_table (base_in);
1100 /* Look at all interpretations of the base candidate, if necessary,
1101 to find information to propagate into this candidate. */
1102 while (base_cand && !base && base_cand->kind != CAND_PHI)
1104 if (base_cand->kind == CAND_MULT
1105 && TREE_CODE (base_cand->stride) == INTEGER_CST)
1107 /* Y = (B + i') * S, S constant
1108 X = Y * c
1109 ============================
1110 X = (B + i') * (S * c) */
1111 temp = wi::to_widest (base_cand->stride) * wi::to_widest (stride_in);
1112 if (wi::fits_to_tree_p (temp, TREE_TYPE (stride_in)))
1114 base = base_cand->base_expr;
1115 index = base_cand->index;
1116 stride = wide_int_to_tree (TREE_TYPE (stride_in), temp);
1117 ctype = base_cand->cand_type;
1118 if (has_single_use (base_in))
1119 savings = (base_cand->dead_savings
1120 + stmt_cost (base_cand->cand_stmt, speed));
1123 else if (base_cand->kind == CAND_ADD && integer_onep (base_cand->stride))
1125 /* Y = B + (i' * 1)
1126 X = Y * c
1127 ===========================
1128 X = (B + i') * c */
1129 base = base_cand->base_expr;
1130 index = base_cand->index;
1131 stride = stride_in;
1132 ctype = base_cand->cand_type;
1133 if (has_single_use (base_in))
1134 savings = (base_cand->dead_savings
1135 + stmt_cost (base_cand->cand_stmt, speed));
1137 else if (base_cand->kind == CAND_ADD
1138 && base_cand->index == 1
1139 && TREE_CODE (base_cand->stride) == INTEGER_CST)
1141 /* Y = B + (1 * S), S constant
1142 X = Y * c
1143 ===========================
1144 X = (B + S) * c */
1145 base = base_cand->base_expr;
1146 index = wi::to_widest (base_cand->stride);
1147 stride = stride_in;
1148 ctype = base_cand->cand_type;
1149 if (has_single_use (base_in))
1150 savings = (base_cand->dead_savings
1151 + stmt_cost (base_cand->cand_stmt, speed));
1154 if (base_cand->next_interp)
1155 base_cand = lookup_cand (base_cand->next_interp);
1156 else
1157 base_cand = NULL;
1160 if (!base)
1162 /* No interpretations had anything useful to propagate, so
1163 produce X = (Y + 0) * c. */
1164 base = base_in;
1165 index = 0;
1166 stride = stride_in;
1167 ctype = TREE_TYPE (base_in);
1170 c = alloc_cand_and_find_basis (CAND_MULT, gs, base, index, stride,
1171 ctype, savings);
1172 return c;
1175 /* Given GS which is a multiply of scalar integers, make an appropriate
1176 entry in the candidate table. If this is a multiply of two SSA names,
1177 create two CAND_MULT interpretations and attempt to find a basis for
1178 each of them. Otherwise, create a single CAND_MULT and attempt to
1179 find a basis. */
1181 static void
1182 slsr_process_mul (gimple *gs, tree rhs1, tree rhs2, bool speed)
1184 slsr_cand_t c, c2;
1186 /* If this is a multiply of an SSA name with itself, it is highly
1187 unlikely that we will get a strength reduction opportunity, so
1188 don't record it as a candidate. This simplifies the logic for
1189 finding a basis, so if this is removed that must be considered. */
1190 if (rhs1 == rhs2)
1191 return;
1193 if (TREE_CODE (rhs2) == SSA_NAME)
1195 /* Record an interpretation of this statement in the candidate table
1196 assuming RHS1 is the base expression and RHS2 is the stride. */
1197 c = create_mul_ssa_cand (gs, rhs1, rhs2, speed);
1199 /* Add the first interpretation to the statement-candidate mapping. */
1200 add_cand_for_stmt (gs, c);
1202 /* Record another interpretation of this statement assuming RHS1
1203 is the stride and RHS2 is the base expression. */
1204 c2 = create_mul_ssa_cand (gs, rhs2, rhs1, speed);
1205 c->next_interp = c2->cand_num;
1207 else
1209 /* Record an interpretation for the multiply-immediate. */
1210 c = create_mul_imm_cand (gs, rhs1, rhs2, speed);
1212 /* Add the interpretation to the statement-candidate mapping. */
1213 add_cand_for_stmt (gs, c);
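/* For instance (names invented, assuming nothing useful is known about
   the operands): a statement x = a * b with two SSA operands yields two
   CAND_MULT interpretations chained through next_interp, one with base
   a and stride b and one with base b and stride a, while x = a * 4
   yields a single CAND_MULT with base a and constant stride 4.  */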
1217 /* Create a candidate entry for a statement GS, where GS adds two
1218 SSA names BASE_IN and ADDEND_IN if SUBTRACT_P is false, and
1219 subtracts ADDEND_IN from BASE_IN otherwise. Propagate any known
1220 information about the two SSA names into the new candidate.
1221 Return the new candidate. */
1223 static slsr_cand_t
1224 create_add_ssa_cand (gimple *gs, tree base_in, tree addend_in,
1225 bool subtract_p, bool speed)
1227 tree base = NULL_TREE, stride = NULL_TREE, ctype = NULL;
1228 widest_int index;
1229 unsigned savings = 0;
1230 slsr_cand_t c;
1231 slsr_cand_t base_cand = base_cand_from_table (base_in);
1232 slsr_cand_t addend_cand = base_cand_from_table (addend_in);
1234 /* The most useful transformation is a multiply-immediate feeding
1235 an add or subtract. Look for that first. */
1236 while (addend_cand && !base && addend_cand->kind != CAND_PHI)
1238 if (addend_cand->kind == CAND_MULT
1239 && addend_cand->index == 0
1240 && TREE_CODE (addend_cand->stride) == INTEGER_CST)
1242 /* Z = (B + 0) * S, S constant
1243 X = Y +/- Z
1244 ===========================
1245 X = Y + ((+/-1 * S) * B) */
1246 base = base_in;
1247 index = wi::to_widest (addend_cand->stride);
1248 if (subtract_p)
1249 index = -index;
1250 stride = addend_cand->base_expr;
1251 ctype = TREE_TYPE (base_in);
1252 if (has_single_use (addend_in))
1253 savings = (addend_cand->dead_savings
1254 + stmt_cost (addend_cand->cand_stmt, speed));
1257 if (addend_cand->next_interp)
1258 addend_cand = lookup_cand (addend_cand->next_interp);
1259 else
1260 addend_cand = NULL;
1263 while (base_cand && !base && base_cand->kind != CAND_PHI)
1265 if (base_cand->kind == CAND_ADD
1266 && (base_cand->index == 0
1267 || operand_equal_p (base_cand->stride,
1268 integer_zero_node, 0)))
1270 /* Y = B + (i' * S), i' * S = 0
1271 X = Y +/- Z
1272 ============================
1273 X = B + (+/-1 * Z) */
1274 base = base_cand->base_expr;
1275 index = subtract_p ? -1 : 1;
1276 stride = addend_in;
1277 ctype = base_cand->cand_type;
1278 if (has_single_use (base_in))
1279 savings = (base_cand->dead_savings
1280 + stmt_cost (base_cand->cand_stmt, speed));
1282 else if (subtract_p)
1284 slsr_cand_t subtrahend_cand = base_cand_from_table (addend_in);
1286 while (subtrahend_cand && !base && subtrahend_cand->kind != CAND_PHI)
1288 if (subtrahend_cand->kind == CAND_MULT
1289 && subtrahend_cand->index == 0
1290 && TREE_CODE (subtrahend_cand->stride) == INTEGER_CST)
1292 /* Z = (B + 0) * S, S constant
1293 X = Y - Z
1294 ===========================
1295 Value: X = Y + ((-1 * S) * B) */
1296 base = base_in;
1297 index = wi::to_widest (subtrahend_cand->stride);
1298 index = -index;
1299 stride = subtrahend_cand->base_expr;
1300 ctype = TREE_TYPE (base_in);
1301 if (has_single_use (addend_in))
1302 savings = (subtrahend_cand->dead_savings
1303 + stmt_cost (subtrahend_cand->cand_stmt, speed));
1306 if (subtrahend_cand->next_interp)
1307 subtrahend_cand = lookup_cand (subtrahend_cand->next_interp);
1308 else
1309 subtrahend_cand = NULL;
1313 if (base_cand->next_interp)
1314 base_cand = lookup_cand (base_cand->next_interp);
1315 else
1316 base_cand = NULL;
1319 if (!base)
1321 /* No interpretations had anything useful to propagate, so
1322 produce X = Y + (1 * Z). */
1323 base = base_in;
1324 index = subtract_p ? -1 : 1;
1325 stride = addend_in;
1326 ctype = TREE_TYPE (base_in);
1329 c = alloc_cand_and_find_basis (CAND_ADD, gs, base, index, stride,
1330 ctype, savings);
1331 return c;
1334 /* Create a candidate entry for a statement GS, where GS adds SSA
1335 name BASE_IN to constant INDEX_IN. Propagate any known information
1336 about BASE_IN into the new candidate. Return the new candidate. */
1338 static slsr_cand_t
1339 create_add_imm_cand (gimple *gs, tree base_in, const widest_int &index_in,
1340 bool speed)
1342 enum cand_kind kind = CAND_ADD;
1343 tree base = NULL_TREE, stride = NULL_TREE, ctype = NULL_TREE;
1344 widest_int index, multiple;
1345 unsigned savings = 0;
1346 slsr_cand_t c;
1347 slsr_cand_t base_cand = base_cand_from_table (base_in);
1349 while (base_cand && !base && base_cand->kind != CAND_PHI)
1351 signop sign = TYPE_SIGN (TREE_TYPE (base_cand->stride));
1353 if (TREE_CODE (base_cand->stride) == INTEGER_CST
1354 && wi::multiple_of_p (index_in, wi::to_widest (base_cand->stride),
1355 sign, &multiple))
1357 /* Y = (B + i') * S, S constant, c = kS for some integer k
1358 X = Y + c
1359 ============================
1360 X = (B + (i'+ k)) * S
1362 Y = B + (i' * S), S constant, c = kS for some integer k
1363 X = Y + c
1364 ============================
1365 X = (B + (i'+ k)) * S */
1366 kind = base_cand->kind;
1367 base = base_cand->base_expr;
1368 index = base_cand->index + multiple;
1369 stride = base_cand->stride;
1370 ctype = base_cand->cand_type;
1371 if (has_single_use (base_in))
1372 savings = (base_cand->dead_savings
1373 + stmt_cost (base_cand->cand_stmt, speed));
1376 if (base_cand->next_interp)
1377 base_cand = lookup_cand (base_cand->next_interp);
1378 else
1379 base_cand = NULL;
1382 if (!base)
1384 /* No interpretations had anything useful to propagate, so
1385 produce X = Y + (c * 1). */
1386 kind = CAND_ADD;
1387 base = base_in;
1388 index = index_in;
1389 stride = integer_one_node;
1390 ctype = TREE_TYPE (base_in);
1393 c = alloc_cand_and_find_basis (kind, gs, base, index, stride,
1394 ctype, savings);
1395 return c;
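/* Worked instance (values invented): if Y is recorded as the CAND_MULT
   Y = (B + 3) * 4 and GS is X = Y + 8, the constant 8 is a multiple of
   the stride (k = 2), so X is recorded as the CAND_MULT X = (B + 5) * 4
   rather than as a plain add of a constant.  */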
1398 /* Given GS which is an add or subtract of scalar integers or pointers,
1399 make at least one appropriate entry in the candidate table. */
1401 static void
1402 slsr_process_add (gimple *gs, tree rhs1, tree rhs2, bool speed)
1404 bool subtract_p = gimple_assign_rhs_code (gs) == MINUS_EXPR;
1405 slsr_cand_t c = NULL, c2;
1407 if (TREE_CODE (rhs2) == SSA_NAME)
1409 /* First record an interpretation assuming RHS1 is the base expression
1410 and RHS2 is the stride. But it doesn't make sense for the
1411 stride to be a pointer, so don't record a candidate in that case. */
1412 if (!POINTER_TYPE_P (TREE_TYPE (rhs2)))
1414 c = create_add_ssa_cand (gs, rhs1, rhs2, subtract_p, speed);
1416 /* Add the first interpretation to the statement-candidate
1417 mapping. */
1418 add_cand_for_stmt (gs, c);
1421 /* If the two RHS operands are identical, or this is a subtract,
1422 we're done. */
1423 if (operand_equal_p (rhs1, rhs2, 0) || subtract_p)
1424 return;
1426 /* Otherwise, record another interpretation assuming RHS2 is the
1427 base expression and RHS1 is the stride, again provided that the
1428 stride is not a pointer. */
1429 if (!POINTER_TYPE_P (TREE_TYPE (rhs1)))
1431 c2 = create_add_ssa_cand (gs, rhs2, rhs1, false, speed);
1432 if (c)
1433 c->next_interp = c2->cand_num;
1434 else
1435 add_cand_for_stmt (gs, c2);
1438 else
1440 /* Record an interpretation for the add-immediate. */
1441 widest_int index = wi::to_widest (rhs2);
1442 if (subtract_p)
1443 index = -index;
1445 c = create_add_imm_cand (gs, rhs1, index, speed);
1447 /* Add the interpretation to the statement-candidate mapping. */
1448 add_cand_for_stmt (gs, c);
1452 /* Given GS which is a negate of a scalar integer, make an appropriate
1453 entry in the candidate table. A negate is equivalent to a multiply
1454 by -1. */
1456 static void
1457 slsr_process_neg (gimple *gs, tree rhs1, bool speed)
1459 /* Record a CAND_MULT interpretation for the multiply by -1. */
1460 slsr_cand_t c = create_mul_imm_cand (gs, rhs1, integer_minus_one_node, speed);
1462 /* Add the interpretation to the statement-candidate mapping. */
1463 add_cand_for_stmt (gs, c);
1466 /* Help function for legal_cast_p, operating on two trees. Checks
1467 whether it's allowable to cast from RHS to LHS. See legal_cast_p
1468 for more details. */
1470 static bool
1471 legal_cast_p_1 (tree lhs, tree rhs)
1473 tree lhs_type, rhs_type;
1474 unsigned lhs_size, rhs_size;
1475 bool lhs_wraps, rhs_wraps;
1477 lhs_type = TREE_TYPE (lhs);
1478 rhs_type = TREE_TYPE (rhs);
1479 lhs_size = TYPE_PRECISION (lhs_type);
1480 rhs_size = TYPE_PRECISION (rhs_type);
1481 lhs_wraps = ANY_INTEGRAL_TYPE_P (lhs_type) && TYPE_OVERFLOW_WRAPS (lhs_type);
1482 rhs_wraps = ANY_INTEGRAL_TYPE_P (rhs_type) && TYPE_OVERFLOW_WRAPS (rhs_type);
1484 if (lhs_size < rhs_size
1485 || (rhs_wraps && !lhs_wraps)
1486 || (rhs_wraps && lhs_wraps && rhs_size != lhs_size))
1487 return false;
1489 return true;
1492 /* Return TRUE if GS is a statement that defines an SSA name from
1493 a conversion and is legal for us to combine with an add and multiply
1494 in the candidate table. For example, suppose we have:
1496 A = B + i;
1497 C = (type) A;
1498 D = C * S;
1500 Without the type-cast, we would create a CAND_MULT for D with base B,
1501 index i, and stride S. We want to record this candidate only if it
1502 is equivalent to apply the type cast following the multiply:
1504 A = B + i;
1505 E = A * S;
1506 D = (type) E;
1508 We will record the type with the candidate for D. This allows us
1509 to use a similar previous candidate as a basis. If we have earlier seen
1511 A' = B + i';
1512 C' = (type) A';
1513 D' = C' * S;
1515 we can replace D with
1517 D = D' + (i - i') * S;
1519 But if moving the type-cast would change semantics, we mustn't do this.
1521 This is legitimate for casts from a non-wrapping integral type to
1522 any integral type of the same or larger size. It is not legitimate
1523 to convert a wrapping type to a non-wrapping type, or to a wrapping
1524 type of a different size. I.e., with a wrapping type, we must
1525 assume that the addition B + i could wrap, in which case performing
1526 the multiply before or after one of the "illegal" type casts will
1527 have different semantics. */
1529 static bool
1530 legal_cast_p (gimple *gs, tree rhs)
1532 if (!is_gimple_assign (gs)
1533 || !CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (gs)))
1534 return false;
1536 return legal_cast_p_1 (gimple_assign_lhs (gs), rhs);
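/* Illustration (types and names invented, assuming the default rules
   for signed overflow): in a sequence such as

     int a = b + i;
     long c = (long) a;
     long d = c * s;

   the widening cast from non-wrapping "int" to the wider "long" is
   legal, so the multiply may be viewed as preceding the cast and a
   candidate of type "long" recorded.  If a and b were instead
   "unsigned int", the addition could legitimately wrap, and moving the
   cast across it could change the value, so the cast is rejected.  */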
1539 /* Given GS which is a cast to a scalar integer type, determine whether
1540 the cast is legal for strength reduction. If so, make at least one
1541 appropriate entry in the candidate table. */
1543 static void
1544 slsr_process_cast (gimple *gs, tree rhs1, bool speed)
1546 tree lhs, ctype;
1547 slsr_cand_t base_cand, c, c2;
1548 unsigned savings = 0;
1550 if (!legal_cast_p (gs, rhs1))
1551 return;
1553 lhs = gimple_assign_lhs (gs);
1554 base_cand = base_cand_from_table (rhs1);
1555 ctype = TREE_TYPE (lhs);
1557 if (base_cand && base_cand->kind != CAND_PHI)
1559 while (base_cand)
1561 /* Propagate all data from the base candidate except the type,
1562 which comes from the cast, and the base candidate's cast,
1563 which is no longer applicable. */
1564 if (has_single_use (rhs1))
1565 savings = (base_cand->dead_savings
1566 + stmt_cost (base_cand->cand_stmt, speed));
1568 c = alloc_cand_and_find_basis (base_cand->kind, gs,
1569 base_cand->base_expr,
1570 base_cand->index, base_cand->stride,
1571 ctype, savings);
1572 if (base_cand->next_interp)
1573 base_cand = lookup_cand (base_cand->next_interp);
1574 else
1575 base_cand = NULL;
1578 else
1580 /* If nothing is known about the RHS, create fresh CAND_ADD and
1581 CAND_MULT interpretations:
1583 X = Y + (0 * 1)
1584 X = (Y + 0) * 1
1586 The first of these is somewhat arbitrary, but the choice of
1587 1 for the stride simplifies the logic for propagating casts
1588 into their uses. */
1589 c = alloc_cand_and_find_basis (CAND_ADD, gs, rhs1,
1590 0, integer_one_node, ctype, 0);
1591 c2 = alloc_cand_and_find_basis (CAND_MULT, gs, rhs1,
1592 0, integer_one_node, ctype, 0);
1593 c->next_interp = c2->cand_num;
1596 /* Add the first (or only) interpretation to the statement-candidate
1597 mapping. */
1598 add_cand_for_stmt (gs, c);
1601 /* Given GS which is a copy of a scalar integer type, make at least one
1602 appropriate entry in the candidate table.
1604 This interface is included for completeness, but is unnecessary
1605 if this pass immediately follows a pass that performs copy
1606 propagation, such as DOM. */
1608 static void
1609 slsr_process_copy (gimple *gs, tree rhs1, bool speed)
1611 slsr_cand_t base_cand, c, c2;
1612 unsigned savings = 0;
1614 base_cand = base_cand_from_table (rhs1);
1616 if (base_cand && base_cand->kind != CAND_PHI)
1618 while (base_cand)
1620 /* Propagate all data from the base candidate. */
1621 if (has_single_use (rhs1))
1622 savings = (base_cand->dead_savings
1623 + stmt_cost (base_cand->cand_stmt, speed));
1625 c = alloc_cand_and_find_basis (base_cand->kind, gs,
1626 base_cand->base_expr,
1627 base_cand->index, base_cand->stride,
1628 base_cand->cand_type, savings);
1629 if (base_cand->next_interp)
1630 base_cand = lookup_cand (base_cand->next_interp);
1631 else
1632 base_cand = NULL;
1635 else
1637 /* If nothing is known about the RHS, create fresh CAND_ADD and
1638 CAND_MULT interpretations:
1640 X = Y + (0 * 1)
1641 X = (Y + 0) * 1
1643 The first of these is somewhat arbitrary, but the choice of
1644 1 for the stride simplifies the logic for propagating casts
1645 into their uses. */
1646 c = alloc_cand_and_find_basis (CAND_ADD, gs, rhs1,
1647 0, integer_one_node, TREE_TYPE (rhs1), 0);
1648 c2 = alloc_cand_and_find_basis (CAND_MULT, gs, rhs1,
1649 0, integer_one_node, TREE_TYPE (rhs1), 0);
1650 c->next_interp = c2->cand_num;
1653 /* Add the first (or only) interpretation to the statement-candidate
1654 mapping. */
1655 add_cand_for_stmt (gs, c);
1658 class find_candidates_dom_walker : public dom_walker
1660 public:
1661 find_candidates_dom_walker (cdi_direction direction)
1662 : dom_walker (direction) {}
1663 virtual void before_dom_children (basic_block);
1666 /* Find strength-reduction candidates in block BB. */
1668 void
1669 find_candidates_dom_walker::before_dom_children (basic_block bb)
1671 bool speed = optimize_bb_for_speed_p (bb);
1673 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
1674 gsi_next (&gsi))
1675 slsr_process_phi (gsi.phi (), speed);
1677 for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
1678 gsi_next (&gsi))
1680 gimple *gs = gsi_stmt (gsi);
1682 if (gimple_vuse (gs) && gimple_assign_single_p (gs))
1683 slsr_process_ref (gs);
1685 else if (is_gimple_assign (gs)
1686 && SCALAR_INT_MODE_P
1687 (TYPE_MODE (TREE_TYPE (gimple_assign_lhs (gs)))))
1689 tree rhs1 = NULL_TREE, rhs2 = NULL_TREE;
1691 switch (gimple_assign_rhs_code (gs))
1693 case MULT_EXPR:
1694 case PLUS_EXPR:
1695 rhs1 = gimple_assign_rhs1 (gs);
1696 rhs2 = gimple_assign_rhs2 (gs);
1697 /* Should never happen, but currently some buggy situations
1698 in earlier phases put constants in rhs1. */
1699 if (TREE_CODE (rhs1) != SSA_NAME)
1700 continue;
1701 break;
1703 /* Possible future opportunity: rhs1 of a ptr+ can be
1704 an ADDR_EXPR. */
1705 case POINTER_PLUS_EXPR:
1706 case MINUS_EXPR:
1707 rhs2 = gimple_assign_rhs2 (gs);
1708 /* Fall-through. */
1710 CASE_CONVERT:
1711 case MODIFY_EXPR:
1712 case NEGATE_EXPR:
1713 rhs1 = gimple_assign_rhs1 (gs);
1714 if (TREE_CODE (rhs1) != SSA_NAME)
1715 continue;
1716 break;
1718 default:
1722 switch (gimple_assign_rhs_code (gs))
1724 case MULT_EXPR:
1725 slsr_process_mul (gs, rhs1, rhs2, speed);
1726 break;
1728 case PLUS_EXPR:
1729 case POINTER_PLUS_EXPR:
1730 case MINUS_EXPR:
1731 slsr_process_add (gs, rhs1, rhs2, speed);
1732 break;
1734 case NEGATE_EXPR:
1735 slsr_process_neg (gs, rhs1, speed);
1736 break;
1738 CASE_CONVERT:
1739 slsr_process_cast (gs, rhs1, speed);
1740 break;
1742 case MODIFY_EXPR:
1743 slsr_process_copy (gs, rhs1, speed);
1744 break;
1746 default:
1753 /* Dump a candidate for debug. */
1755 static void
1756 dump_candidate (slsr_cand_t c)
1758 fprintf (dump_file, "%3d [%d] ", c->cand_num,
1759 gimple_bb (c->cand_stmt)->index);
1760 print_gimple_stmt (dump_file, c->cand_stmt, 0, 0);
1761 switch (c->kind)
1763 case CAND_MULT:
1764 fputs (" MULT : (", dump_file);
1765 print_generic_expr (dump_file, c->base_expr, 0);
1766 fputs (" + ", dump_file);
1767 print_decs (c->index, dump_file);
1768 fputs (") * ", dump_file);
1769 print_generic_expr (dump_file, c->stride, 0);
1770 fputs (" : ", dump_file);
1771 break;
1772 case CAND_ADD:
1773 fputs (" ADD : ", dump_file);
1774 print_generic_expr (dump_file, c->base_expr, 0);
1775 fputs (" + (", dump_file);
1776 print_decs (c->index, dump_file);
1777 fputs (" * ", dump_file);
1778 print_generic_expr (dump_file, c->stride, 0);
1779 fputs (") : ", dump_file);
1780 break;
1781 case CAND_REF:
1782 fputs (" REF : ", dump_file);
1783 print_generic_expr (dump_file, c->base_expr, 0);
1784 fputs (" + (", dump_file);
1785 print_generic_expr (dump_file, c->stride, 0);
1786 fputs (") + ", dump_file);
1787 print_decs (c->index, dump_file);
1788 fputs (" : ", dump_file);
1789 break;
1790 case CAND_PHI:
1791 fputs (" PHI : ", dump_file);
1792 print_generic_expr (dump_file, c->base_expr, 0);
1793 fputs (" + (unknown * ", dump_file);
1794 print_generic_expr (dump_file, c->stride, 0);
1795 fputs (") : ", dump_file);
1796 break;
1797 default:
1798 gcc_unreachable ();
1800 print_generic_expr (dump_file, c->cand_type, 0);
1801 fprintf (dump_file, "\n basis: %d dependent: %d sibling: %d\n",
1802 c->basis, c->dependent, c->sibling);
1803 fprintf (dump_file, " next-interp: %d dead-savings: %d\n",
1804 c->next_interp, c->dead_savings);
1805 if (c->def_phi)
1806 fprintf (dump_file, " phi: %d\n", c->def_phi);
1807 fputs ("\n", dump_file);
1810 /* Dump the candidate vector for debug. */
1812 static void
1813 dump_cand_vec (void)
1815 unsigned i;
1816 slsr_cand_t c;
1818 fprintf (dump_file, "\nStrength reduction candidate vector:\n\n");
1820 FOR_EACH_VEC_ELT (cand_vec, i, c)
1821 dump_candidate (c);
1824 /* Callback used to dump the candidate chains hash table. */
1827 ssa_base_cand_dump_callback (cand_chain **slot, void *ignored ATTRIBUTE_UNUSED)
1829 const_cand_chain_t chain = *slot;
1830 cand_chain_t p;
1832 print_generic_expr (dump_file, chain->base_expr, 0);
1833 fprintf (dump_file, " -> %d", chain->cand->cand_num);
1835 for (p = chain->next; p; p = p->next)
1836 fprintf (dump_file, " -> %d", p->cand->cand_num);
1838 fputs ("\n", dump_file);
1839 return 1;
1842 /* Dump the candidate chains. */
1844 static void
1845 dump_cand_chains (void)
1847 fprintf (dump_file, "\nStrength reduction candidate chains:\n\n");
1848 base_cand_map->traverse_noresize <void *, ssa_base_cand_dump_callback>
1849 (NULL);
1850 fputs ("\n", dump_file);
1853 /* Dump the increment vector for debug. */
1855 static void
1856 dump_incr_vec (void)
1858 if (dump_file && (dump_flags & TDF_DETAILS))
1860 unsigned i;
1862 fprintf (dump_file, "\nIncrement vector:\n\n");
1864 for (i = 0; i < incr_vec_len; i++)
1866 fprintf (dump_file, "%3d increment: ", i);
1867 print_decs (incr_vec[i].incr, dump_file);
1868 fprintf (dump_file, "\n count: %d", incr_vec[i].count);
1869 fprintf (dump_file, "\n cost: %d", incr_vec[i].cost);
1870 fputs ("\n initializer: ", dump_file);
1871 print_generic_expr (dump_file, incr_vec[i].initializer, 0);
1872 fputs ("\n\n", dump_file);
1877 /* Replace *EXPR in candidate C with an equivalent strength-reduced
1878 data reference. */
1880 static void
1881 replace_ref (tree *expr, slsr_cand_t c)
1883 tree add_expr, mem_ref, acc_type = TREE_TYPE (*expr);
1884 unsigned HOST_WIDE_INT misalign;
1885 unsigned align;
1887 /* Ensure the memory reference carries the minimum alignment
1888 requirement for the data type. See PR58041. */
1889 get_object_alignment_1 (*expr, &align, &misalign);
1890 if (misalign != 0)
1891 align = (misalign & -misalign);
1892 if (align < TYPE_ALIGN (acc_type))
1893 acc_type = build_aligned_type (acc_type, align);
1895 add_expr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (c->base_expr),
1896 c->base_expr, c->stride);
1897 mem_ref = fold_build2 (MEM_REF, acc_type, add_expr,
1898 wide_int_to_tree (c->cand_type, c->index));
1900 /* Gimplify the base addressing expression for the new MEM_REF tree. */
1901 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
1902 TREE_OPERAND (mem_ref, 0)
1903 = force_gimple_operand_gsi (&gsi, TREE_OPERAND (mem_ref, 0),
1904 /*simple_p=*/true, NULL,
1905 /*before=*/true, GSI_SAME_STMT);
1906 copy_ref_info (mem_ref, *expr);
1907 *expr = mem_ref;
1908 update_stmt (c->cand_stmt);
1911 /* Replace CAND_REF candidate C, each sibling of candidate C, and each
1912 dependent of candidate C with an equivalent strength-reduced data
1913 reference. */
1915 static void
1916 replace_refs (slsr_cand_t c)
1918 if (dump_file && (dump_flags & TDF_DETAILS))
1920 fputs ("Replacing reference: ", dump_file);
1921 print_gimple_stmt (dump_file, c->cand_stmt, 0, 0);
1924 if (gimple_vdef (c->cand_stmt))
1926 tree *lhs = gimple_assign_lhs_ptr (c->cand_stmt);
1927 replace_ref (lhs, c);
1929 else
1931 tree *rhs = gimple_assign_rhs1_ptr (c->cand_stmt);
1932 replace_ref (rhs, c);
1935 if (dump_file && (dump_flags & TDF_DETAILS))
1937 fputs ("With: ", dump_file);
1938 print_gimple_stmt (dump_file, c->cand_stmt, 0, 0);
1939 fputs ("\n", dump_file);
1942 if (c->sibling)
1943 replace_refs (lookup_cand (c->sibling));
1945 if (c->dependent)
1946 replace_refs (lookup_cand (c->dependent));
1949 /* Return TRUE if candidate C is dependent upon a PHI. */
1951 static bool
1952 phi_dependent_cand_p (slsr_cand_t c)
1954 /* A candidate is not necessarily dependent upon a PHI just because
1955 it has a phi definition for its base name. It may have a basis
1956 that relies upon the same phi definition, in which case the PHI
1957 is irrelevant to this candidate. */
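/* Concretely (hypothetical situation): if C and its basis both derive
   their base name from the same statement x_3 = PHI <...>, the phi
   does not hide the basis from C, so FALSE is returned and C is
   handled as an unconditional candidate.  */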
1958 return (c->def_phi
1959 && c->basis
1960 && lookup_cand (c->basis)->def_phi != c->def_phi);
1963 /* Calculate the increment required for candidate C relative to
1964 its basis. */
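/* For example (hypothetical candidates): a candidate interpreted as
   X = (B + 7) * S whose basis is Y = (B + 2) * S has increment
   7 - 2 = 5, the amount by which X's index exceeds its basis's.  */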
1966 static widest_int
1967 cand_increment (slsr_cand_t c)
1969 slsr_cand_t basis;
1971 /* If the candidate doesn't have a basis, just return its own
1972 index. This is useful in record_increments to help us find
1973 an existing initializer. Also, if the candidate's basis is
1974 hidden by a phi, then its own index will be the increment
1975 from the newly introduced phi basis. */
1976 if (!c->basis || phi_dependent_cand_p (c))
1977 return c->index;
1979 basis = lookup_cand (c->basis);
1980 gcc_assert (operand_equal_p (c->base_expr, basis->base_expr, 0));
1981 return c->index - basis->index;
1984 /* Calculate the increment required for candidate C relative to
1985 its basis. If we aren't going to generate pointer arithmetic
1986 for this candidate, return the absolute value of that increment
1987 instead. */
1989 static inline widest_int
1990 cand_abs_increment (slsr_cand_t c)
1992 widest_int increment = cand_increment (c);
1994 if (!address_arithmetic_p && wi::neg_p (increment))
1995 increment = -increment;
1997 return increment;
2000 /* Return TRUE iff candidate C has already been replaced under
2001 another interpretation. */
2003 static inline bool
2004 cand_already_replaced (slsr_cand_t c)
2006 return (gimple_bb (c->cand_stmt) == 0);
2009 /* Common logic used by replace_unconditional_candidate and
2010 replace_conditional_candidate. */
2012 static void
2013 replace_mult_candidate (slsr_cand_t c, tree basis_name, widest_int bump)
2015 tree target_type = TREE_TYPE (gimple_assign_lhs (c->cand_stmt));
2016 enum tree_code cand_code = gimple_assign_rhs_code (c->cand_stmt);
2018 /* It is highly unlikely, but possible, that the resulting
2019 bump doesn't fit in a HWI. Abandon the replacement
2020 in this case. This does not affect siblings or dependents
2021 of C. Restriction to signed HWI is conservative for unsigned
2022 types but allows for safe negation without twisted logic. */
2023 if (wi::fits_shwi_p (bump)
2024 && bump.to_shwi () != HOST_WIDE_INT_MIN
2025 /* It is not useful to replace casts, copies, or adds of
2026 an SSA name and a constant. */
2027 && cand_code != MODIFY_EXPR
2028 && !CONVERT_EXPR_CODE_P (cand_code)
2029 && cand_code != PLUS_EXPR
2030 && cand_code != POINTER_PLUS_EXPR
2031 && cand_code != MINUS_EXPR)
2033 enum tree_code code = PLUS_EXPR;
2034 tree bump_tree;
2035 gimple *stmt_to_print = NULL;
2037 /* If the basis name and the candidate's LHS have incompatible
2038 types, introduce a cast. */
2039 if (!useless_type_conversion_p (target_type, TREE_TYPE (basis_name)))
2040 basis_name = introduce_cast_before_cand (c, target_type, basis_name);
2041 if (wi::neg_p (bump))
2043 code = MINUS_EXPR;
2044 bump = -bump;
2047 bump_tree = wide_int_to_tree (target_type, bump);
2049 if (dump_file && (dump_flags & TDF_DETAILS))
2051 fputs ("Replacing: ", dump_file);
2052 print_gimple_stmt (dump_file, c->cand_stmt, 0, 0);
2055 if (bump == 0)
2057 tree lhs = gimple_assign_lhs (c->cand_stmt);
2058 gassign *copy_stmt = gimple_build_assign (lhs, basis_name);
2059 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
2060 gimple_set_location (copy_stmt, gimple_location (c->cand_stmt));
2061 gsi_replace (&gsi, copy_stmt, false);
2062 c->cand_stmt = copy_stmt;
2063 if (dump_file && (dump_flags & TDF_DETAILS))
2064 stmt_to_print = copy_stmt;
2066 else
2068 tree rhs1, rhs2;
2069 if (cand_code != NEGATE_EXPR) {
2070 rhs1 = gimple_assign_rhs1 (c->cand_stmt);
2071 rhs2 = gimple_assign_rhs2 (c->cand_stmt);
2073 if (cand_code != NEGATE_EXPR
2074 && ((operand_equal_p (rhs1, basis_name, 0)
2075 && operand_equal_p (rhs2, bump_tree, 0))
2076 || (operand_equal_p (rhs1, bump_tree, 0)
2077 && operand_equal_p (rhs2, basis_name, 0))))
2079 if (dump_file && (dump_flags & TDF_DETAILS))
2081 fputs ("(duplicate, not actually replacing)", dump_file);
2082 stmt_to_print = c->cand_stmt;
2085 else
2087 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
2088 gimple_assign_set_rhs_with_ops (&gsi, code,
2089 basis_name, bump_tree);
2090 update_stmt (gsi_stmt (gsi));
2091 c->cand_stmt = gsi_stmt (gsi);
2092 if (dump_file && (dump_flags & TDF_DETAILS))
2093 stmt_to_print = gsi_stmt (gsi);
2097 if (dump_file && (dump_flags & TDF_DETAILS))
2099 fputs ("With: ", dump_file);
2100 print_gimple_stmt (dump_file, stmt_to_print, 0, 0);
2101 fputs ("\n", dump_file);
2106 /* Replace candidate C with an add or subtract. Note that we only
2107 operate on CAND_MULTs with known strides, so we will never generate
2108 a POINTER_PLUS_EXPR. Each candidate X = (B + i) * S is replaced by
2109 X = Y + ((i - i') * S), as described in the module commentary. The
2110 folded value ((i - i') * S) is referred to here as the "bump." */
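/* A concrete (made-up) instance: with basis Y = (B + 2) * 4 and
   candidate X = (B + 7) * 4, the increment is 5 and the bump is
   5 * 4 = 20, so the multiply defining X is rewritten in place as
   X = Y + 20.  */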
2112 static void
2113 replace_unconditional_candidate (slsr_cand_t c)
2115 slsr_cand_t basis;
2117 if (cand_already_replaced (c))
2118 return;
2120 basis = lookup_cand (c->basis);
2121 widest_int bump = cand_increment (c) * wi::to_widest (c->stride);
2123 replace_mult_candidate (c, gimple_assign_lhs (basis->cand_stmt), bump);
2126 /* Return the index in the increment vector of the given INCREMENT,
2127 or -1 if not found. The latter can occur if more than
2128 MAX_INCR_VEC_LEN increments have been found. */
2130 static inline int
2131 incr_vec_index (const widest_int &increment)
2133 unsigned i;
2135 for (i = 0; i < incr_vec_len && increment != incr_vec[i].incr; i++)
2138 if (i < incr_vec_len)
2139 return i;
2140 else
2141 return -1;
2144 /* Create a new statement along edge E to add BASIS_NAME to the product
2145 of INCREMENT and the stride of candidate C. Create and return a new
2146 SSA name to be used as the LHS of the new statement.
2147 KNOWN_STRIDE is true iff C's stride is a constant. */
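/* Sketch (hypothetical numbers): if the add candidate reaching this
   edge has index 3, the hidden basis Y has index 1, and the stride is
   a known constant 4, the statement created on the edge is

     slsr_N = Y + 8;            (3 - 1) * 4

   With an unknown stride, the recorded initializer T_0 = stride * incr
   is used instead, or the stride itself for increments of 1 and -1.  */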
2149 static tree
2150 create_add_on_incoming_edge (slsr_cand_t c, tree basis_name,
2151 widest_int increment, edge e, location_t loc,
2152 bool known_stride)
2154 basic_block insert_bb;
2155 gimple_stmt_iterator gsi;
2156 tree lhs, basis_type;
2157 gassign *new_stmt;
2159 /* If the add candidate along this incoming edge has the same
2160 index as C's hidden basis, the hidden basis represents this
2161 edge correctly. */
2162 if (increment == 0)
2163 return basis_name;
2165 basis_type = TREE_TYPE (basis_name);
2166 lhs = make_temp_ssa_name (basis_type, NULL, "slsr");
2168 if (known_stride)
2170 tree bump_tree;
2171 enum tree_code code = PLUS_EXPR;
2172 widest_int bump = increment * wi::to_widest (c->stride);
2173 if (wi::neg_p (bump))
2175 code = MINUS_EXPR;
2176 bump = -bump;
2179 bump_tree = wide_int_to_tree (basis_type, bump);
2180 new_stmt = gimple_build_assign (lhs, code, basis_name, bump_tree);
2182 else
2184 int i;
2185 bool negate_incr = (!address_arithmetic_p && wi::neg_p (increment));
2186 i = incr_vec_index (negate_incr ? -increment : increment);
2187 gcc_assert (i >= 0);
2189 if (incr_vec[i].initializer)
2191 enum tree_code code = negate_incr ? MINUS_EXPR : PLUS_EXPR;
2192 new_stmt = gimple_build_assign (lhs, code, basis_name,
2193 incr_vec[i].initializer);
2195 else if (increment == 1)
2196 new_stmt = gimple_build_assign (lhs, PLUS_EXPR, basis_name, c->stride);
2197 else if (increment == -1)
2198 new_stmt = gimple_build_assign (lhs, MINUS_EXPR, basis_name,
2199 c->stride);
2200 else
2201 gcc_unreachable ();
2204 insert_bb = single_succ_p (e->src) ? e->src : split_edge (e);
2205 gsi = gsi_last_bb (insert_bb);
2207 if (!gsi_end_p (gsi) && is_ctrl_stmt (gsi_stmt (gsi)))
2208 gsi_insert_before (&gsi, new_stmt, GSI_NEW_STMT);
2209 else
2210 gsi_insert_after (&gsi, new_stmt, GSI_NEW_STMT);
2212 gimple_set_location (new_stmt, loc);
2214 if (dump_file && (dump_flags & TDF_DETAILS))
2216 fprintf (dump_file, "Inserting in block %d: ", insert_bb->index);
2217 print_gimple_stmt (dump_file, new_stmt, 0, 0);
2220 return lhs;
2223 /* Given a candidate C with BASIS_NAME being the LHS of C's basis which
2224 is hidden by the phi node FROM_PHI, create a new phi node in the same
2225 block as FROM_PHI. The new phi is suitable for use as a basis by C,
2226 with its phi arguments representing conditional adjustments to the
2227 hidden basis along conditional incoming paths. Those adjustments are
2228 made by creating add statements (and sometimes recursively creating
2229 phis) along those incoming paths. LOC is the location to attach to
2230 the introduced statements. KNOWN_STRIDE is true iff C's stride is a
2231 constant. */
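/* Rough picture (hypothetical, diamond-shaped CFG): when the hidden
   basis feeds one arm of

     x_4 = PHI <x_1, x_2>

   and an add candidate with a different index feeds the other, a new

     slsr_5 = PHI <basis_name, slsr_3>

   is created in the same block, where slsr_3 is an add inserted on the
   second incoming edge to make up the index difference.  */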
2233 static tree
2234 create_phi_basis (slsr_cand_t c, gimple *from_phi, tree basis_name,
2235 location_t loc, bool known_stride)
2237 int i;
2238 tree name, phi_arg;
2239 gphi *phi;
2240 vec<tree> phi_args;
2241 slsr_cand_t basis = lookup_cand (c->basis);
2242 int nargs = gimple_phi_num_args (from_phi);
2243 basic_block phi_bb = gimple_bb (from_phi);
2244 slsr_cand_t phi_cand = base_cand_from_table (gimple_phi_result (from_phi));
2245 phi_args.create (nargs);
2247 /* Process each argument of the existing phi that represents
2248 conditionally-executed add candidates. */
2249 for (i = 0; i < nargs; i++)
2251 edge e = (*phi_bb->preds)[i];
2252 tree arg = gimple_phi_arg_def (from_phi, i);
2253 tree feeding_def;
2255 /* If the phi argument is the base name of the CAND_PHI, then
2256 this incoming arc should use the hidden basis. */
2257 if (operand_equal_p (arg, phi_cand->base_expr, 0))
2258 if (basis->index == 0)
2259 feeding_def = gimple_assign_lhs (basis->cand_stmt);
2260 else
2262 widest_int incr = -basis->index;
2263 feeding_def = create_add_on_incoming_edge (c, basis_name, incr,
2264 e, loc, known_stride);
2266 else
2268 gimple *arg_def = SSA_NAME_DEF_STMT (arg);
2270 /* If there is another phi along this incoming edge, we must
2271 process it in the same fashion to ensure that all basis
2272 adjustments are made along its incoming edges. */
2273 if (gimple_code (arg_def) == GIMPLE_PHI)
2274 feeding_def = create_phi_basis (c, arg_def, basis_name,
2275 loc, known_stride);
2276 else
2278 slsr_cand_t arg_cand = base_cand_from_table (arg);
2279 widest_int diff = arg_cand->index - basis->index;
2280 feeding_def = create_add_on_incoming_edge (c, basis_name, diff,
2281 e, loc, known_stride);
2285 /* Because of recursion, we need to save the arguments in a vector
2286 so we can create the PHI statement all at once. Otherwise the
2287 storage for the half-created PHI can be reclaimed. */
2288 phi_args.safe_push (feeding_def);
2291 /* Create the new phi basis. */
2292 name = make_temp_ssa_name (TREE_TYPE (basis_name), NULL, "slsr");
2293 phi = create_phi_node (name, phi_bb);
2294 SSA_NAME_DEF_STMT (name) = phi;
2296 FOR_EACH_VEC_ELT (phi_args, i, phi_arg)
2298 edge e = (*phi_bb->preds)[i];
2299 add_phi_arg (phi, phi_arg, e, loc);
2302 update_stmt (phi);
2304 if (dump_file && (dump_flags & TDF_DETAILS))
2306 fputs ("Introducing new phi basis: ", dump_file);
2307 print_gimple_stmt (dump_file, phi, 0, 0);
2310 return name;
2313 /* Given a candidate C whose basis is hidden by at least one intervening
2314 phi, introduce a matching number of new phis to represent its basis
2315 adjusted by conditional increments along possible incoming paths. Then
2316 replace C as though it were an unconditional candidate, using the new
2317 basis. */
2319 static void
2320 replace_conditional_candidate (slsr_cand_t c)
2322 tree basis_name, name;
2323 slsr_cand_t basis;
2324 location_t loc;
2326 /* Look up the LHS SSA name from C's basis. This will be the
2327 RHS1 of the adds we will introduce to create new phi arguments. */
2328 basis = lookup_cand (c->basis);
2329 basis_name = gimple_assign_lhs (basis->cand_stmt);
2331 /* Create a new phi statement which will represent C's true basis
2332 after the transformation is complete. */
2333 loc = gimple_location (c->cand_stmt);
2334 name = create_phi_basis (c, lookup_cand (c->def_phi)->cand_stmt,
2335 basis_name, loc, KNOWN_STRIDE);
2336 /* Replace C with an add of the new basis phi and a constant. */
2337 widest_int bump = c->index * wi::to_widest (c->stride);
2339 replace_mult_candidate (c, name, bump);
2342 /* Compute the expected costs of inserting basis adjustments for
2343 candidate C with phi-definition PHI. The cost of inserting
2344 one adjustment is given by ONE_ADD_COST. If PHI has arguments
2345 which are themselves phi results, recursively calculate costs
2346 for those phis as well. */
2348 static int
2349 phi_add_costs (gimple *phi, slsr_cand_t c, int one_add_cost)
2351 unsigned i;
2352 int cost = 0;
2353 slsr_cand_t phi_cand = base_cand_from_table (gimple_phi_result (phi));
2355 /* If we work our way back to a phi that isn't dominated by the hidden
2356 basis, this isn't a candidate for replacement. Indicate this by
2357 returning an unreasonably high cost. It's not easy to detect
2358 these situations when determining the basis, so we defer the
2359 decision until now. */
2360 basic_block phi_bb = gimple_bb (phi);
2361 slsr_cand_t basis = lookup_cand (c->basis);
2362 basic_block basis_bb = gimple_bb (basis->cand_stmt);
2364 if (phi_bb == basis_bb || !dominated_by_p (CDI_DOMINATORS, phi_bb, basis_bb))
2365 return COST_INFINITE;
2367 for (i = 0; i < gimple_phi_num_args (phi); i++)
2369 tree arg = gimple_phi_arg_def (phi, i);
2371 if (arg != phi_cand->base_expr)
2373 gimple *arg_def = SSA_NAME_DEF_STMT (arg);
2375 if (gimple_code (arg_def) == GIMPLE_PHI)
2376 cost += phi_add_costs (arg_def, c, one_add_cost);
2377 else
2379 slsr_cand_t arg_cand = base_cand_from_table (arg);
2381 if (arg_cand->index != c->index)
2382 cost += one_add_cost;
2387 return cost;
2390 /* For candidate C, each sibling of candidate C, and each dependent of
2391 candidate C, determine whether the candidate is dependent upon a
2392 phi that hides its basis. If not, replace the candidate unconditionally.
2393 Otherwise, determine whether the cost of introducing compensation code
2394 for the candidate is offset by the gains from strength reduction. If
2395 so, replace the candidate and introduce the compensation code. */
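/* Worked example (all costs hypothetical): if one add costs 4, two phi
   arguments need compensating adds (add_costs = 4 + 2 * 4 = 12), the
   replaced multiply is worth 16, and nothing else goes dead, then
   cost = 12 - 16 - 0 = -4 <= COST_NEUTRAL and the conditional
   candidate is replaced.  */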
2397 static void
2398 replace_uncond_cands_and_profitable_phis (slsr_cand_t c)
2400 if (phi_dependent_cand_p (c))
2402 if (c->kind == CAND_MULT)
2404 /* A candidate dependent upon a phi will replace a multiply by
2405 a constant with an add, and will insert at most one add for
2406 each phi argument. Add these costs with the potential dead-code
2407 savings to determine profitability. */
2408 bool speed = optimize_bb_for_speed_p (gimple_bb (c->cand_stmt));
2409 int mult_savings = stmt_cost (c->cand_stmt, speed);
2410 gimple *phi = lookup_cand (c->def_phi)->cand_stmt;
2411 tree phi_result = gimple_phi_result (phi);
2412 int one_add_cost = add_cost (speed,
2413 TYPE_MODE (TREE_TYPE (phi_result)));
2414 int add_costs = one_add_cost + phi_add_costs (phi, c, one_add_cost);
2415 int cost = add_costs - mult_savings - c->dead_savings;
2417 if (dump_file && (dump_flags & TDF_DETAILS))
2419 fprintf (dump_file, " Conditional candidate %d:\n", c->cand_num);
2420 fprintf (dump_file, " add_costs = %d\n", add_costs);
2421 fprintf (dump_file, " mult_savings = %d\n", mult_savings);
2422 fprintf (dump_file, " dead_savings = %d\n", c->dead_savings);
2423 fprintf (dump_file, " cost = %d\n", cost);
2424 if (cost <= COST_NEUTRAL)
2425 fputs (" Replacing...\n", dump_file);
2426 else
2427 fputs (" Not replaced.\n", dump_file);
2430 if (cost <= COST_NEUTRAL)
2431 replace_conditional_candidate (c);
2434 else
2435 replace_unconditional_candidate (c);
2437 if (c->sibling)
2438 replace_uncond_cands_and_profitable_phis (lookup_cand (c->sibling));
2440 if (c->dependent)
2441 replace_uncond_cands_and_profitable_phis (lookup_cand (c->dependent));
2444 /* Count the number of candidates in the tree rooted at C that have
2445 not already been replaced under other interpretations. */
2447 static int
2448 count_candidates (slsr_cand_t c)
2450 unsigned count = cand_already_replaced (c) ? 0 : 1;
2452 if (c->sibling)
2453 count += count_candidates (lookup_cand (c->sibling));
2455 if (c->dependent)
2456 count += count_candidates (lookup_cand (c->dependent));
2458 return count;
2461 /* Increase the count of INCREMENT by one in the increment vector.
2462 INCREMENT is associated with candidate C. If INCREMENT is to be
2463 conditionally executed as part of a conditional candidate replacement,
2464 IS_PHI_ADJUST is true, otherwise false. If an initializer
2465 T_0 = stride * I is provided by a candidate that dominates all
2466 candidates with the same increment, also record T_0 for subsequent use. */
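/* For instance (hypothetical statements): a dominating CAND_ADD
   x_5 = y_2 + t_4 interpreted with index 7, where t_4 already holds
   7 * S, lets t_4 be recorded as the initializer for increment 7, so
   later candidates needing that increment can reuse it instead of
   computing stride * 7 again.  */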
2468 static void
2469 record_increment (slsr_cand_t c, widest_int increment, bool is_phi_adjust)
2471 bool found = false;
2472 unsigned i;
2474 /* Treat increments that differ only in sign as identical so as to
2475 share initializers, unless we are generating pointer arithmetic. */
2476 if (!address_arithmetic_p && wi::neg_p (increment))
2477 increment = -increment;
2479 for (i = 0; i < incr_vec_len; i++)
2481 if (incr_vec[i].incr == increment)
2483 incr_vec[i].count++;
2484 found = true;
2486 /* If we previously recorded an initializer that doesn't
2487 dominate this candidate, it's not going to be useful to
2488 us after all. */
2489 if (incr_vec[i].initializer
2490 && !dominated_by_p (CDI_DOMINATORS,
2491 gimple_bb (c->cand_stmt),
2492 incr_vec[i].init_bb))
2494 incr_vec[i].initializer = NULL_TREE;
2495 incr_vec[i].init_bb = NULL;
2498 break;
2502 if (!found && incr_vec_len < MAX_INCR_VEC_LEN - 1)
2504 /* The first time we see an increment, create the entry for it.
2505 If this is the root candidate which doesn't have a basis, set
2506 the count to zero. We're only processing it so it can possibly
2507 provide an initializer for other candidates. */
2508 incr_vec[incr_vec_len].incr = increment;
2509 incr_vec[incr_vec_len].count = c->basis || is_phi_adjust ? 1 : 0;
2510 incr_vec[incr_vec_len].cost = COST_INFINITE;
2512 /* Optimistically record the first occurrence of this increment
2513 as providing an initializer (if it does); we will revise this
2514 opinion later if it doesn't dominate all other occurrences.
2515 Exception: increments of -1, 0, 1 never need initializers;
2516 and phi adjustments don't ever provide initializers. */
2517 if (c->kind == CAND_ADD
2518 && !is_phi_adjust
2519 && c->index == increment
2520 && (wi::gts_p (increment, 1)
2521 || wi::lts_p (increment, -1))
2522 && (gimple_assign_rhs_code (c->cand_stmt) == PLUS_EXPR
2523 || gimple_assign_rhs_code (c->cand_stmt) == POINTER_PLUS_EXPR))
2525 tree t0 = NULL_TREE;
2526 tree rhs1 = gimple_assign_rhs1 (c->cand_stmt);
2527 tree rhs2 = gimple_assign_rhs2 (c->cand_stmt);
2528 if (operand_equal_p (rhs1, c->base_expr, 0))
2529 t0 = rhs2;
2530 else if (operand_equal_p (rhs2, c->base_expr, 0))
2531 t0 = rhs1;
2532 if (t0
2533 && SSA_NAME_DEF_STMT (t0)
2534 && gimple_bb (SSA_NAME_DEF_STMT (t0)))
2536 incr_vec[incr_vec_len].initializer = t0;
2537 incr_vec[incr_vec_len++].init_bb
2538 = gimple_bb (SSA_NAME_DEF_STMT (t0));
2540 else
2542 incr_vec[incr_vec_len].initializer = NULL_TREE;
2543 incr_vec[incr_vec_len++].init_bb = NULL;
2546 else
2548 incr_vec[incr_vec_len].initializer = NULL_TREE;
2549 incr_vec[incr_vec_len++].init_bb = NULL;
2554 /* Given phi statement PHI that hides a candidate from its BASIS, find
2555 the increments along each incoming arc (recursively handling additional
2556 phis that may be present) and record them. These increments are the
2557 difference in index between the index-adjusting statements and the
2558 index of the basis. */
2560 static void
2561 record_phi_increments (slsr_cand_t basis, gimple *phi)
2563 unsigned i;
2564 slsr_cand_t phi_cand = base_cand_from_table (gimple_phi_result (phi));
2566 for (i = 0; i < gimple_phi_num_args (phi); i++)
2568 tree arg = gimple_phi_arg_def (phi, i);
2570 if (!operand_equal_p (arg, phi_cand->base_expr, 0))
2572 gimple *arg_def = SSA_NAME_DEF_STMT (arg);
2574 if (gimple_code (arg_def) == GIMPLE_PHI)
2575 record_phi_increments (basis, arg_def);
2576 else
2578 slsr_cand_t arg_cand = base_cand_from_table (arg);
2579 widest_int diff = arg_cand->index - basis->index;
2580 record_increment (arg_cand, diff, PHI_ADJUST);
2586 /* Determine how many times each unique increment occurs in the set
2587 of candidates rooted at C's parent, recording the data in the
2588 increment vector. For each unique increment I, if an initializer
2589 T_0 = stride * I is provided by a candidate that dominates all
2590 candidates with the same increment, also record T_0 for subsequent
2591 use. */
2593 static void
2594 record_increments (slsr_cand_t c)
2596 if (!cand_already_replaced (c))
2598 if (!phi_dependent_cand_p (c))
2599 record_increment (c, cand_increment (c), NOT_PHI_ADJUST);
2600 else
2602 /* A candidate with a basis hidden by a phi will have one
2603 increment for its relationship to the index represented by
2604 the phi, and potentially additional increments along each
2605 incoming edge. For the root of the dependency tree (which
2606 has no basis), process just the initial index in case it has
2607 an initializer that can be used by subsequent candidates. */
2608 record_increment (c, c->index, NOT_PHI_ADJUST);
2610 if (c->basis)
2611 record_phi_increments (lookup_cand (c->basis),
2612 lookup_cand (c->def_phi)->cand_stmt);
2616 if (c->sibling)
2617 record_increments (lookup_cand (c->sibling));
2619 if (c->dependent)
2620 record_increments (lookup_cand (c->dependent));
2623 /* Add up and return the costs of introducing add statements that
2624 require the increment INCR on behalf of candidate C and phi
2625 statement PHI. Accumulate into *SAVINGS the potential savings
2626 from removing existing statements that feed PHI and have no other
2627 uses. */
2629 static int
2630 phi_incr_cost (slsr_cand_t c, const widest_int &incr, gimple *phi,
2631 int *savings)
2633 unsigned i;
2634 int cost = 0;
2635 slsr_cand_t basis = lookup_cand (c->basis);
2636 slsr_cand_t phi_cand = base_cand_from_table (gimple_phi_result (phi));
2638 for (i = 0; i < gimple_phi_num_args (phi); i++)
2640 tree arg = gimple_phi_arg_def (phi, i);
2642 if (!operand_equal_p (arg, phi_cand->base_expr, 0))
2644 gimple *arg_def = SSA_NAME_DEF_STMT (arg);
2646 if (gimple_code (arg_def) == GIMPLE_PHI)
2648 int feeding_savings = 0;
2649 cost += phi_incr_cost (c, incr, arg_def, &feeding_savings);
2650 if (has_single_use (gimple_phi_result (arg_def)))
2651 *savings += feeding_savings;
2653 else
2655 slsr_cand_t arg_cand = base_cand_from_table (arg);
2656 widest_int diff = arg_cand->index - basis->index;
2658 if (incr == diff)
2660 tree basis_lhs = gimple_assign_lhs (basis->cand_stmt);
2661 tree lhs = gimple_assign_lhs (arg_cand->cand_stmt);
2662 cost += add_cost (true, TYPE_MODE (TREE_TYPE (basis_lhs)));
2663 if (has_single_use (lhs))
2664 *savings += stmt_cost (arg_cand->cand_stmt, true);
2670 return cost;
2673 /* Return the first candidate in the tree rooted at C that has not
2674 already been replaced, favoring siblings over dependents. */
2676 static slsr_cand_t
2677 unreplaced_cand_in_tree (slsr_cand_t c)
2679 if (!cand_already_replaced (c))
2680 return c;
2682 if (c->sibling)
2684 slsr_cand_t sib = unreplaced_cand_in_tree (lookup_cand (c->sibling));
2685 if (sib)
2686 return sib;
2689 if (c->dependent)
2691 slsr_cand_t dep = unreplaced_cand_in_tree (lookup_cand (c->dependent));
2692 if (dep)
2693 return dep;
2696 return NULL;
2699 /* Return TRUE if the candidates in the tree rooted at C should be
2700 optimized for speed, else FALSE. We estimate this based on the block
2701 containing the most dominant candidate in the tree that has not yet
2702 been replaced. */
2704 static bool
2705 optimize_cands_for_speed_p (slsr_cand_t c)
2707 slsr_cand_t c2 = unreplaced_cand_in_tree (c);
2708 gcc_assert (c2);
2709 return optimize_bb_for_speed_p (gimple_bb (c2->cand_stmt));
2712 /* Add COST_IN to the lowest cost of any dependent path starting at
2713 candidate C or any of its siblings, counting only candidates along
2714 such paths with increment INCR. Assume that replacing a candidate
2715 reduces cost by REPL_SAVINGS. Also account for savings from any
2716 statements that would go dead. If COUNT_PHIS is true, include
2717 costs of introducing feeding statements for conditional candidates. */
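/* Small worked example (hypothetical): with REPL_SAVINGS = 3 and a
   dependent chain of three candidates of which two use increment INCR
   and none have dead-code savings, the chain contributes
   COST_IN - 3 - 3; each sibling chain is costed from COST_IN as well,
   and the minimum over all such paths is returned.  */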
2719 static int
2720 lowest_cost_path (int cost_in, int repl_savings, slsr_cand_t c,
2721 const widest_int &incr, bool count_phis)
2723 int local_cost, sib_cost, savings = 0;
2724 widest_int cand_incr = cand_abs_increment (c);
2726 if (cand_already_replaced (c))
2727 local_cost = cost_in;
2728 else if (incr == cand_incr)
2729 local_cost = cost_in - repl_savings - c->dead_savings;
2730 else
2731 local_cost = cost_in - c->dead_savings;
2733 if (count_phis
2734 && phi_dependent_cand_p (c)
2735 && !cand_already_replaced (c))
2737 gimple *phi = lookup_cand (c->def_phi)->cand_stmt;
2738 local_cost += phi_incr_cost (c, incr, phi, &savings);
2740 if (has_single_use (gimple_phi_result (phi)))
2741 local_cost -= savings;
2744 if (c->dependent)
2745 local_cost = lowest_cost_path (local_cost, repl_savings,
2746 lookup_cand (c->dependent), incr,
2747 count_phis);
2749 if (c->sibling)
2751 sib_cost = lowest_cost_path (cost_in, repl_savings,
2752 lookup_cand (c->sibling), incr,
2753 count_phis);
2754 local_cost = MIN (local_cost, sib_cost);
2757 return local_cost;
2760 /* Compute the total savings that would accrue from all replacements
2761 in the candidate tree rooted at C, counting only candidates with
2762 increment INCR. Assume that replacing a candidate reduces cost
2763 by REPL_SAVINGS. Also account for savings from statements that
2764 would go dead. */
2766 static int
2767 total_savings (int repl_savings, slsr_cand_t c, const widest_int &incr,
2768 bool count_phis)
2770 int savings = 0;
2771 widest_int cand_incr = cand_abs_increment (c);
2773 if (incr == cand_incr && !cand_already_replaced (c))
2774 savings += repl_savings + c->dead_savings;
2776 if (count_phis
2777 && phi_dependent_cand_p (c)
2778 && !cand_already_replaced (c))
2780 int phi_savings = 0;
2781 gimple *phi = lookup_cand (c->def_phi)->cand_stmt;
2782 savings -= phi_incr_cost (c, incr, phi, &phi_savings);
2784 if (has_single_use (gimple_phi_result (phi)))
2785 savings += phi_savings;
2788 if (c->dependent)
2789 savings += total_savings (repl_savings, lookup_cand (c->dependent), incr,
2790 count_phis);
2792 if (c->sibling)
2793 savings += total_savings (repl_savings, lookup_cand (c->sibling), incr,
2794 count_phis);
2796 return savings;
2799 /* Use target-specific costs to determine and record which increments
2800 in the current candidate tree are profitable to replace, assuming
2801 MODE and SPEED. FIRST_DEP is the first dependent of the root of
2802 the candidate tree.
2804 One slight limitation here is that we don't account for the possible
2805 introduction of casts in some cases. See replace_one_candidate for
2806 the cases where these are introduced. This should probably be cleaned
2807 up sometime. */
2809 static void
2810 analyze_increments (slsr_cand_t first_dep, machine_mode mode, bool speed)
2812 unsigned i;
2814 for (i = 0; i < incr_vec_len; i++)
2816 HOST_WIDE_INT incr = incr_vec[i].incr.to_shwi ();
2818 /* If somehow this increment is bigger than a HWI, we won't
2819 be optimizing candidates that use it. And if the increment
2820 has a count of zero, nothing will be done with it. */
2821 if (!wi::fits_shwi_p (incr_vec[i].incr) || !incr_vec[i].count)
2822 incr_vec[i].cost = COST_INFINITE;
2824 /* Increments of 0, 1, and -1 are always profitable to replace,
2825 because they always replace a multiply or add with an add or
2826 copy, and may cause one or more existing instructions to go
2827 dead. Exception: -1 can't be assumed to be profitable for
2828 pointer addition. */
2829 else if (incr == 0
2830 || incr == 1
2831 || (incr == -1
2832 && (gimple_assign_rhs_code (first_dep->cand_stmt)
2833 != POINTER_PLUS_EXPR)))
2834 incr_vec[i].cost = COST_NEUTRAL;
2836 /* FORNOW: If we need to add an initializer, give up if a cast from
2837 the candidate's type to its stride's type can lose precision.
2838 This could eventually be handled better by expressly retaining the
2839 result of a cast to a wider type in the stride. Example:
2841 short int _1;
2842 _2 = (int) _1;
2843 _3 = _2 * 10;
2844 _4 = x + _3; ADD: x + (10 * _1) : int
2845 _5 = _2 * 15;
2846 _6 = x + _5; ADD: x + (15 * _1) : int
2848 Right now replacing _6 would cause insertion of an initializer
2849 of the form "short int T = _1 * 5;" followed by a cast to
2850 int, which could overflow incorrectly. Had we recorded _2 or
2851 (int)_1 as the stride, this wouldn't happen. However, doing
2852 this breaks other opportunities, so this will require some
2853 care. */
2854 else if (!incr_vec[i].initializer
2855 && TREE_CODE (first_dep->stride) != INTEGER_CST
2856 && !legal_cast_p_1 (first_dep->stride,
2857 gimple_assign_lhs (first_dep->cand_stmt)))
2859 incr_vec[i].cost = COST_INFINITE;
2861 /* If we need to add an initializer, make sure we don't introduce
2862 a multiply by a pointer type, which can happen in certain cast
2863 scenarios. FIXME: When cleaning up these cast issues, we can
2864 afford to introduce the multiply provided we cast out to an
2865 unsigned int of appropriate size. */
2866 else if (!incr_vec[i].initializer
2867 && TREE_CODE (first_dep->stride) != INTEGER_CST
2868 && POINTER_TYPE_P (TREE_TYPE (first_dep->stride)))
2870 incr_vec[i].cost = COST_INFINITE;
2872 /* For any other increment, if this is a multiply candidate, we
2873 must introduce a temporary T and initialize it with
2874 T_0 = stride * increment. When optimizing for speed, walk the
2875 candidate tree to calculate the best cost reduction along any
2876 path; if it offsets the fixed cost of inserting the initializer,
2877 replacing the increment is profitable. When optimizing for
2878 size, instead calculate the total cost reduction from replacing
2879 all candidates with this increment. */
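/* E.g. (hypothetical costs): if the initializer costs 6 and
   repl_savings = mul_cost - add_cost = 6, a speed-optimized path
   containing two replaceable candidates already pays for itself:
   6 - 6 - 6 = -6, which is recorded as a profitable (negative) cost.  */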
2880 else if (first_dep->kind == CAND_MULT)
2882 int cost = mult_by_coeff_cost (incr, mode, speed);
2883 int repl_savings = mul_cost (speed, mode) - add_cost (speed, mode);
2884 if (speed)
2885 cost = lowest_cost_path (cost, repl_savings, first_dep,
2886 incr_vec[i].incr, COUNT_PHIS);
2887 else
2888 cost -= total_savings (repl_savings, first_dep, incr_vec[i].incr,
2889 COUNT_PHIS);
2891 incr_vec[i].cost = cost;
2894 /* If this is an add candidate, the initializer may already
2895 exist, so only calculate the cost of the initializer if it
2896 doesn't. We are replacing one add with another here, so the
2897 known replacement savings is zero. We will account for removal
2898 of dead instructions in lowest_cost_path or total_savings. */
2899 else
2901 int cost = 0;
2902 if (!incr_vec[i].initializer)
2903 cost = mult_by_coeff_cost (incr, mode, speed);
2905 if (speed)
2906 cost = lowest_cost_path (cost, 0, first_dep, incr_vec[i].incr,
2907 DONT_COUNT_PHIS);
2908 else
2909 cost -= total_savings (0, first_dep, incr_vec[i].incr,
2910 DONT_COUNT_PHIS);
2912 incr_vec[i].cost = cost;
2917 /* Return the nearest common dominator of BB1 and BB2. If the blocks
2918 are identical, return the earlier of C1 and C2 in *WHERE. Otherwise,
2919 if the NCD matches BB1, return C1 in *WHERE; if the NCD matches BB2,
2920 return C2 in *WHERE; and if the NCD matches neither, return NULL in
2921 *WHERE. Note: It is possible for one of C1 and C2 to be NULL. */
2923 static basic_block
2924 ncd_for_two_cands (basic_block bb1, basic_block bb2,
2925 slsr_cand_t c1, slsr_cand_t c2, slsr_cand_t *where)
2927 basic_block ncd;
2929 if (!bb1)
2931 *where = c2;
2932 return bb2;
2935 if (!bb2)
2937 *where = c1;
2938 return bb1;
2941 ncd = nearest_common_dominator (CDI_DOMINATORS, bb1, bb2);
2943 /* If both candidates are in the same block, the earlier
2944 candidate wins. */
2945 if (bb1 == ncd && bb2 == ncd)
2947 if (!c1 || (c2 && c2->cand_num < c1->cand_num))
2948 *where = c2;
2949 else
2950 *where = c1;
2953 /* Otherwise, if one of them produced a candidate in the
2954 dominator, that one wins. */
2955 else if (bb1 == ncd)
2956 *where = c1;
2958 else if (bb2 == ncd)
2959 *where = c2;
2961 /* If neither matches the dominator, neither wins. */
2962 else
2963 *where = NULL;
2965 return ncd;
2968 /* Consider all candidates that feed PHI. Find the nearest common
2969 dominator of those candidates requiring the given increment INCR.
2970 Further find and return the nearest common dominator of this result
2971 with block NCD. If the returned block contains one or more of the
2972 candidates, return the earliest candidate in the block in *WHERE. */
2974 static basic_block
2975 ncd_with_phi (slsr_cand_t c, const widest_int &incr, gphi *phi,
2976 basic_block ncd, slsr_cand_t *where)
2978 unsigned i;
2979 slsr_cand_t basis = lookup_cand (c->basis);
2980 slsr_cand_t phi_cand = base_cand_from_table (gimple_phi_result (phi));
2982 for (i = 0; i < gimple_phi_num_args (phi); i++)
2984 tree arg = gimple_phi_arg_def (phi, i);
2986 if (!operand_equal_p (arg, phi_cand->base_expr, 0))
2988 gimple *arg_def = SSA_NAME_DEF_STMT (arg);
2990 if (gimple_code (arg_def) == GIMPLE_PHI)
2991 ncd = ncd_with_phi (c, incr, as_a <gphi *> (arg_def), ncd,
2992 where);
2993 else
2995 slsr_cand_t arg_cand = base_cand_from_table (arg);
2996 widest_int diff = arg_cand->index - basis->index;
2997 basic_block pred = gimple_phi_arg_edge (phi, i)->src;
2999 if ((incr == diff) || (!address_arithmetic_p && incr == -diff))
3000 ncd = ncd_for_two_cands (ncd, pred, *where, NULL, where);
3005 return ncd;
3008 /* Consider the candidate C together with any candidates that feed
3009 C's phi dependence (if any). Find and return the nearest common
3010 dominator of those candidates requiring the given increment INCR.
3011 If the returned block contains one or more of the candidates,
3012 return the earliest candidate in the block in *WHERE. */
3014 static basic_block
3015 ncd_of_cand_and_phis (slsr_cand_t c, const widest_int &incr, slsr_cand_t *where)
3017 basic_block ncd = NULL;
3019 if (cand_abs_increment (c) == incr)
3021 ncd = gimple_bb (c->cand_stmt);
3022 *where = c;
3025 if (phi_dependent_cand_p (c))
3026 ncd = ncd_with_phi (c, incr,
3027 as_a <gphi *> (lookup_cand (c->def_phi)->cand_stmt),
3028 ncd, where);
3030 return ncd;
3033 /* Consider all candidates in the tree rooted at C for which INCR
3034 represents the required increment of C relative to its basis.
3035 Find and return the basic block that most nearly dominates all
3036 such candidates. If the returned block contains one or more of
3037 the candidates, return the earliest candidate in the block in
3038 *WHERE. */
3040 static basic_block
3041 nearest_common_dominator_for_cands (slsr_cand_t c, const widest_int &incr,
3042 slsr_cand_t *where)
3044 basic_block sib_ncd = NULL, dep_ncd = NULL, this_ncd = NULL, ncd;
3045 slsr_cand_t sib_where = NULL, dep_where = NULL, this_where = NULL, new_where;
3047 /* First find the NCD of all siblings and dependents. */
3048 if (c->sibling)
3049 sib_ncd = nearest_common_dominator_for_cands (lookup_cand (c->sibling),
3050 incr, &sib_where);
3051 if (c->dependent)
3052 dep_ncd = nearest_common_dominator_for_cands (lookup_cand (c->dependent),
3053 incr, &dep_where);
3054 if (!sib_ncd && !dep_ncd)
3056 new_where = NULL;
3057 ncd = NULL;
3059 else if (sib_ncd && !dep_ncd)
3061 new_where = sib_where;
3062 ncd = sib_ncd;
3064 else if (dep_ncd && !sib_ncd)
3066 new_where = dep_where;
3067 ncd = dep_ncd;
3069 else
3070 ncd = ncd_for_two_cands (sib_ncd, dep_ncd, sib_where,
3071 dep_where, &new_where);
3073 /* If the candidate's increment doesn't match the one we're interested
3074 in (and neither do any increments for feeding defs of a phi-dependence),
3075 then the result depends only on siblings and dependents. */
3076 this_ncd = ncd_of_cand_and_phis (c, incr, &this_where);
3078 if (!this_ncd || cand_already_replaced (c))
3080 *where = new_where;
3081 return ncd;
3084 /* Otherwise, compare this candidate with the result from all siblings
3085 and dependents. */
3086 ncd = ncd_for_two_cands (ncd, this_ncd, new_where, this_where, where);
3088 return ncd;
3091 /* Return TRUE if the increment indexed by INDEX is profitable to replace. */
3093 static inline bool
3094 profitable_increment_p (unsigned index)
3096 return (incr_vec[index].cost <= COST_NEUTRAL);
3099 /* For each profitable increment in the increment vector not equal to
3100 0 or 1 (or -1, for non-pointer arithmetic), find the nearest common
3101 dominator of all statements in the candidate chain rooted at C
3102 that require that increment, and insert an initializer
3103 T_0 = stride * increment at that location. Record T_0 with the
3104 increment record. */
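/* Illustration (hypothetical names): for a profitable increment of 5
   with SSA stride s_2, a statement

     slsr_N = s_2 * 5;

   is inserted at the nearest common dominator of its users, either
   just before the earliest candidate in that block or at the end of
   the block when no candidate lives there.  */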
3106 static void
3107 insert_initializers (slsr_cand_t c)
3109 unsigned i;
3111 for (i = 0; i < incr_vec_len; i++)
3113 basic_block bb;
3114 slsr_cand_t where = NULL;
3115 gassign *init_stmt;
3116 tree stride_type, new_name, incr_tree;
3117 widest_int incr = incr_vec[i].incr;
3119 if (!profitable_increment_p (i)
3120 || incr == 1
3121 || (incr == -1
3122 && gimple_assign_rhs_code (c->cand_stmt) != POINTER_PLUS_EXPR)
3123 || incr == 0)
3124 continue;
3126 /* We may have already identified an existing initializer that
3127 will suffice. */
3128 if (incr_vec[i].initializer)
3130 if (dump_file && (dump_flags & TDF_DETAILS))
3132 fputs ("Using existing initializer: ", dump_file);
3133 print_gimple_stmt (dump_file,
3134 SSA_NAME_DEF_STMT (incr_vec[i].initializer),
3135 0, 0);
3137 continue;
3140 /* Find the block that most closely dominates all candidates
3141 with this increment. If there is at least one candidate in
3142 that block, the earliest one will be returned in WHERE. */
3143 bb = nearest_common_dominator_for_cands (c, incr, &where);
3145 /* Create a new SSA name to hold the initializer's value. */
3146 stride_type = TREE_TYPE (c->stride);
3147 new_name = make_temp_ssa_name (stride_type, NULL, "slsr");
3148 incr_vec[i].initializer = new_name;
3150 /* Create the initializer and insert it in the latest possible
3151 dominating position. */
3152 incr_tree = wide_int_to_tree (stride_type, incr);
3153 init_stmt = gimple_build_assign (new_name, MULT_EXPR,
3154 c->stride, incr_tree);
3155 if (where)
3157 gimple_stmt_iterator gsi = gsi_for_stmt (where->cand_stmt);
3158 gsi_insert_before (&gsi, init_stmt, GSI_SAME_STMT);
3159 gimple_set_location (init_stmt, gimple_location (where->cand_stmt));
3161 else
3163 gimple_stmt_iterator gsi = gsi_last_bb (bb);
3164 gimple *basis_stmt = lookup_cand (c->basis)->cand_stmt;
3166 if (!gsi_end_p (gsi) && is_ctrl_stmt (gsi_stmt (gsi)))
3167 gsi_insert_before (&gsi, init_stmt, GSI_SAME_STMT);
3168 else
3169 gsi_insert_after (&gsi, init_stmt, GSI_SAME_STMT);
3171 gimple_set_location (init_stmt, gimple_location (basis_stmt));
3174 if (dump_file && (dump_flags & TDF_DETAILS))
3176 fputs ("Inserting initializer: ", dump_file);
3177 print_gimple_stmt (dump_file, init_stmt, 0, 0);
3182 /* Return TRUE iff all required increments for candidates feeding PHI
3183 are profitable to replace on behalf of candidate C. */
3185 static bool
3186 all_phi_incrs_profitable (slsr_cand_t c, gimple *phi)
3188 unsigned i;
3189 slsr_cand_t basis = lookup_cand (c->basis);
3190 slsr_cand_t phi_cand = base_cand_from_table (gimple_phi_result (phi));
3192 for (i = 0; i < gimple_phi_num_args (phi); i++)
3194 tree arg = gimple_phi_arg_def (phi, i);
3196 if (!operand_equal_p (arg, phi_cand->base_expr, 0))
3198 gimple *arg_def = SSA_NAME_DEF_STMT (arg);
3200 if (gimple_code (arg_def) == GIMPLE_PHI)
3202 if (!all_phi_incrs_profitable (c, arg_def))
3203 return false;
3205 else
3207 int j;
3208 slsr_cand_t arg_cand = base_cand_from_table (arg);
3209 widest_int increment = arg_cand->index - basis->index;
3211 if (!address_arithmetic_p && wi::neg_p (increment))
3212 increment = -increment;
3214 j = incr_vec_index (increment);
3216 if (dump_file && (dump_flags & TDF_DETAILS))
3218 fprintf (dump_file, " Conditional candidate %d, phi: ",
3219 c->cand_num);
3220 print_gimple_stmt (dump_file, phi, 0, 0);
3221 fputs (" increment: ", dump_file);
3222 print_decs (increment, dump_file);
3223 if (j < 0)
3224 fprintf (dump_file,
3225 "\n Not replaced; incr_vec overflow.\n");
3226 else {
3227 fprintf (dump_file, "\n cost: %d\n", incr_vec[j].cost);
3228 if (profitable_increment_p (j))
3229 fputs (" Replacing...\n", dump_file);
3230 else
3231 fputs (" Not replaced.\n", dump_file);
3235 if (j < 0 || !profitable_increment_p (j))
3236 return false;
3241 return true;
3244 /* Create a NOP_EXPR that copies FROM_EXPR into a new SSA name of
3245 type TO_TYPE, and insert it in front of the statement represented
3246 by candidate C. A fresh temporary SSA name is created to hold the
3247 cast value. Return the new SSA name. */
3249 static tree
3250 introduce_cast_before_cand (slsr_cand_t c, tree to_type, tree from_expr)
3252 tree cast_lhs;
3253 gassign *cast_stmt;
3254 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
3256 cast_lhs = make_temp_ssa_name (to_type, NULL, "slsr");
3257 cast_stmt = gimple_build_assign (cast_lhs, NOP_EXPR, from_expr);
3258 gimple_set_location (cast_stmt, gimple_location (c->cand_stmt));
3259 gsi_insert_before (&gsi, cast_stmt, GSI_SAME_STMT);
3261 if (dump_file && (dump_flags & TDF_DETAILS))
3263 fputs (" Inserting: ", dump_file);
3264 print_gimple_stmt (dump_file, cast_stmt, 0, 0);
3267 return cast_lhs;
3270 /* Replace the RHS of the statement represented by candidate C with
3271 NEW_CODE, NEW_RHS1, and NEW_RHS2, provided that to do so doesn't
3272 leave C unchanged or just interchange its operands. The original
3273 operation and operands are in OLD_CODE, OLD_RHS1, and OLD_RHS2.
3274 If the replacement was made and we are doing a details dump,
3275 return the revised statement, else NULL. */
3277 static gimple *
3278 replace_rhs_if_not_dup (enum tree_code new_code, tree new_rhs1, tree new_rhs2,
3279 enum tree_code old_code, tree old_rhs1, tree old_rhs2,
3280 slsr_cand_t c)
3282 if (new_code != old_code
3283 || ((!operand_equal_p (new_rhs1, old_rhs1, 0)
3284 || !operand_equal_p (new_rhs2, old_rhs2, 0))
3285 && (!operand_equal_p (new_rhs1, old_rhs2, 0)
3286 || !operand_equal_p (new_rhs2, old_rhs1, 0))))
3288 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
3289 gimple_assign_set_rhs_with_ops (&gsi, new_code, new_rhs1, new_rhs2);
3290 update_stmt (gsi_stmt (gsi));
3291 c->cand_stmt = gsi_stmt (gsi);
3293 if (dump_file && (dump_flags & TDF_DETAILS))
3294 return gsi_stmt (gsi);
3297 else if (dump_file && (dump_flags & TDF_DETAILS))
3298 fputs (" (duplicate, not actually replacing)\n", dump_file);
3300 return NULL;
3303 /* Strength-reduce the statement represented by candidate C by replacing
3304 it with an equivalent addition or subtraction. I is the index into
3305 the increment vector identifying C's increment. A new SSA name is
3306 created if a cast needs to be introduced. BASIS_NAME
3307 is the rhs1 to use in creating the add/subtract. */
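/* Sketch (hypothetical names): a candidate whose increment 5 has the
   initializer T_0 is rewritten as

     x_7 = basis_name + T_0;    (or a MINUS_EXPR if the sign differs)

   while increments of 1, 0, and -1 become an add of the stride, a copy
   (or cast) of the basis name, and a subtract of the stride,
   respectively.  */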
3309 static void
3310 replace_one_candidate (slsr_cand_t c, unsigned i, tree basis_name)
3312 gimple *stmt_to_print = NULL;
3313 tree orig_rhs1, orig_rhs2;
3314 tree rhs2;
3315 enum tree_code orig_code, repl_code;
3316 widest_int cand_incr;
3318 orig_code = gimple_assign_rhs_code (c->cand_stmt);
3319 orig_rhs1 = gimple_assign_rhs1 (c->cand_stmt);
3320 orig_rhs2 = gimple_assign_rhs2 (c->cand_stmt);
3321 cand_incr = cand_increment (c);
3323 if (dump_file && (dump_flags & TDF_DETAILS))
3325 fputs ("Replacing: ", dump_file);
3326 print_gimple_stmt (dump_file, c->cand_stmt, 0, 0);
3327 stmt_to_print = c->cand_stmt;
3330 if (address_arithmetic_p)
3331 repl_code = POINTER_PLUS_EXPR;
3332 else
3333 repl_code = PLUS_EXPR;
3335 /* If the increment has an initializer T_0, replace the candidate
3336 statement with an add of the basis name and the initializer. */
3337 if (incr_vec[i].initializer)
3339 tree init_type = TREE_TYPE (incr_vec[i].initializer);
3340 tree orig_type = TREE_TYPE (orig_rhs2);
3342 if (types_compatible_p (orig_type, init_type))
3343 rhs2 = incr_vec[i].initializer;
3344 else
3345 rhs2 = introduce_cast_before_cand (c, orig_type,
3346 incr_vec[i].initializer);
3348 if (incr_vec[i].incr != cand_incr)
3350 gcc_assert (repl_code == PLUS_EXPR);
3351 repl_code = MINUS_EXPR;
3354 stmt_to_print = replace_rhs_if_not_dup (repl_code, basis_name, rhs2,
3355 orig_code, orig_rhs1, orig_rhs2,
3359 /* Otherwise, the increment is one of -1, 0, and 1. Replace
3360 with a subtract of the stride from the basis name, a copy
3361 from the basis name, or an add of the stride to the basis
3362 name, respectively. It may be necessary to introduce a
3363 cast (or reuse an existing cast). */
3364 else if (cand_incr == 1)
3366 tree stride_type = TREE_TYPE (c->stride);
3367 tree orig_type = TREE_TYPE (orig_rhs2);
3369 if (types_compatible_p (orig_type, stride_type))
3370 rhs2 = c->stride;
3371 else
3372 rhs2 = introduce_cast_before_cand (c, orig_type, c->stride);
3374 stmt_to_print = replace_rhs_if_not_dup (repl_code, basis_name, rhs2,
3375 orig_code, orig_rhs1, orig_rhs2,
3379 else if (cand_incr == -1)
3381 tree stride_type = TREE_TYPE (c->stride);
3382 tree orig_type = TREE_TYPE (orig_rhs2);
3383 gcc_assert (repl_code != POINTER_PLUS_EXPR);
3385 if (types_compatible_p (orig_type, stride_type))
3386 rhs2 = c->stride;
3387 else
3388 rhs2 = introduce_cast_before_cand (c, orig_type, c->stride);
3390 if (orig_code != MINUS_EXPR
3391 || !operand_equal_p (basis_name, orig_rhs1, 0)
3392 || !operand_equal_p (rhs2, orig_rhs2, 0))
3394 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
3395 gimple_assign_set_rhs_with_ops (&gsi, MINUS_EXPR, basis_name, rhs2);
3396 update_stmt (gsi_stmt (gsi));
3397 c->cand_stmt = gsi_stmt (gsi);
3399 if (dump_file && (dump_flags & TDF_DETAILS))
3400 stmt_to_print = gsi_stmt (gsi);
3402 else if (dump_file && (dump_flags & TDF_DETAILS))
3403 fputs (" (duplicate, not actually replacing)\n", dump_file);
3406 else if (cand_incr == 0)
3408 tree lhs = gimple_assign_lhs (c->cand_stmt);
3409 tree lhs_type = TREE_TYPE (lhs);
3410 tree basis_type = TREE_TYPE (basis_name);
3412 if (types_compatible_p (lhs_type, basis_type))
3414 gassign *copy_stmt = gimple_build_assign (lhs, basis_name);
3415 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
3416 gimple_set_location (copy_stmt, gimple_location (c->cand_stmt));
3417 gsi_replace (&gsi, copy_stmt, false);
3418 c->cand_stmt = copy_stmt;
3420 if (dump_file && (dump_flags & TDF_DETAILS))
3421 stmt_to_print = copy_stmt;
3423 else
3425 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
3426 gassign *cast_stmt = gimple_build_assign (lhs, NOP_EXPR, basis_name);
3427 gimple_set_location (cast_stmt, gimple_location (c->cand_stmt));
3428 gsi_replace (&gsi, cast_stmt, false);
3429 c->cand_stmt = cast_stmt;
3431 if (dump_file && (dump_flags & TDF_DETAILS))
3432 stmt_to_print = cast_stmt;
3435 else
3436 gcc_unreachable ();
3438 if (dump_file && (dump_flags & TDF_DETAILS) && stmt_to_print)
3440 fputs ("With: ", dump_file);
3441 print_gimple_stmt (dump_file, stmt_to_print, 0, 0);
3442 fputs ("\n", dump_file);
3446 /* For each candidate in the tree rooted at C, replace it with
3447 an increment if such has been shown to be profitable. */
3449 static void
3450 replace_profitable_candidates (slsr_cand_t c)
3452 if (!cand_already_replaced (c))
3454 widest_int increment = cand_abs_increment (c);
3455 enum tree_code orig_code = gimple_assign_rhs_code (c->cand_stmt);
3456 int i;
3458 i = incr_vec_index (increment);
3460 /* Only process profitable increments. Nothing useful can be done
3461 to a cast or copy. */
3462 if (i >= 0
3463 && profitable_increment_p (i)
3464 && orig_code != MODIFY_EXPR
3465 && !CONVERT_EXPR_CODE_P (orig_code))
3467 if (phi_dependent_cand_p (c))
3469 gimple *phi = lookup_cand (c->def_phi)->cand_stmt;
3471 if (all_phi_incrs_profitable (c, phi))
3473 /* Look up the LHS SSA name from C's basis. This will be
3474 the RHS1 of the adds we will introduce to create new
3475 phi arguments. */
3476 slsr_cand_t basis = lookup_cand (c->basis);
3477 tree basis_name = gimple_assign_lhs (basis->cand_stmt);
3479 /* Create a new phi statement that will represent C's true
3480 basis after the transformation is complete. */
3481 location_t loc = gimple_location (c->cand_stmt);
3482 tree name = create_phi_basis (c, phi, basis_name,
3483 loc, UNKNOWN_STRIDE);
3485 /* Replace C with an add of the new basis phi and the
3486 increment. */
3487 replace_one_candidate (c, i, name);
3490 else
3492 slsr_cand_t basis = lookup_cand (c->basis);
3493 tree basis_name = gimple_assign_lhs (basis->cand_stmt);
3494 replace_one_candidate (c, i, basis_name);
3499 if (c->sibling)
3500 replace_profitable_candidates (lookup_cand (c->sibling));
3502 if (c->dependent)
3503 replace_profitable_candidates (lookup_cand (c->dependent));
3506 /* Analyze costs of related candidates in the candidate vector,
3507 and make beneficial replacements. */
3509 static void
3510 analyze_candidates_and_replace (void)
3512 unsigned i;
3513 slsr_cand_t c;
3515 /* Each candidate that has a null basis and a non-null
3516 dependent is the root of a tree of related statements.
3517 Analyze each tree to determine a subset of those
3518 statements that can be replaced with maximum benefit. */
3519 FOR_EACH_VEC_ELT (cand_vec, i, c)
3521 slsr_cand_t first_dep;
3523 if (c->basis != 0 || c->dependent == 0)
3524 continue;
3526 if (dump_file && (dump_flags & TDF_DETAILS))
3527 fprintf (dump_file, "\nProcessing dependency tree rooted at %d.\n",
3528 c->cand_num);
3530 first_dep = lookup_cand (c->dependent);
3532 /* If this is a chain of CAND_REFs, unconditionally replace
3533 each of them with a strength-reduced data reference. */
3534 if (c->kind == CAND_REF)
3535 replace_refs (c);
3537 /* If the common stride of all related candidates is a known
3538 constant, each candidate without a phi-dependence can be
3539 profitably replaced. Each replaces a multiply by a single
3540 add, with the possibility that a feeding add also goes dead.
3541 A candidate with a phi-dependence is replaced only if the
3542 compensation code it requires is offset by the strength
3543 reduction savings. */
3544 else if (TREE_CODE (c->stride) == INTEGER_CST)
3545 replace_uncond_cands_and_profitable_phis (first_dep);
3547 /* When the stride is an SSA name, it may still be profitable
3548 to replace some or all of the dependent candidates, depending
3549 on whether the introduced increments can be reused, or are
3550 less expensive to calculate than the replaced statements. */
3551 else
3553 machine_mode mode;
3554 bool speed;
3556 /* Determine whether we'll be generating pointer arithmetic
3557 when replacing candidates. */
3558 address_arithmetic_p = (c->kind == CAND_ADD
3559 && POINTER_TYPE_P (c->cand_type));
3561 /* If all candidates have already been replaced under other
3562 interpretations, nothing remains to be done. */
3563 if (!count_candidates (c))
3564 continue;
3566 /* Construct an array of increments for this candidate chain. */
3567 incr_vec = XNEWVEC (incr_info, MAX_INCR_VEC_LEN);
3568 incr_vec_len = 0;
3569 record_increments (c);
3571 /* Determine which increments are profitable to replace. */
3572 mode = TYPE_MODE (TREE_TYPE (gimple_assign_lhs (c->cand_stmt)));
3573 speed = optimize_cands_for_speed_p (c);
3574 analyze_increments (first_dep, mode, speed);
3576 /* Insert initializers of the form T_0 = stride * increment
3577 for use in profitable replacements. */
3578 insert_initializers (first_dep);
3579 dump_incr_vec ();
3581 /* Perform the replacements. */
3582 replace_profitable_candidates (first_dep);
3583 free (incr_vec);
3588 namespace {
3590 const pass_data pass_data_strength_reduction =
3592 GIMPLE_PASS, /* type */
3593 "slsr", /* name */
3594 OPTGROUP_NONE, /* optinfo_flags */
3595 TV_GIMPLE_SLSR, /* tv_id */
3596 ( PROP_cfg | PROP_ssa ), /* properties_required */
3597 0, /* properties_provided */
3598 0, /* properties_destroyed */
3599 0, /* todo_flags_start */
3600 0, /* todo_flags_finish */
3603 class pass_strength_reduction : public gimple_opt_pass
3605 public:
3606 pass_strength_reduction (gcc::context *ctxt)
3607 : gimple_opt_pass (pass_data_strength_reduction, ctxt)
3610 /* opt_pass methods: */
3611 virtual bool gate (function *) { return flag_tree_slsr; }
3612 virtual unsigned int execute (function *);
3614 }; // class pass_strength_reduction
3616 unsigned
3617 pass_strength_reduction::execute (function *fun)
3619 /* Create the obstack where candidates will reside. */
3620 gcc_obstack_init (&cand_obstack);
3622 /* Allocate the candidate vector. */
3623 cand_vec.create (128);
3625 /* Allocate the mapping from statements to candidate indices. */
3626 stmt_cand_map = new hash_map<gimple *, slsr_cand_t>;
3628 /* Create the obstack where candidate chains will reside. */
3629 gcc_obstack_init (&chain_obstack);
3631 /* Allocate the mapping from base expressions to candidate chains. */
3632 base_cand_map = new hash_table<cand_chain_hasher> (500);
3634 /* Allocate the mapping from bases to alternative bases. */
3635 alt_base_map = new hash_map<tree, tree>;
3637 /* Initialize the loop optimizer. We need to detect flow across
3638 back edges, and this gives us dominator information as well. */
3639 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
3641 /* Walk the CFG in predominator order looking for strength reduction
3642 candidates. */
3643 find_candidates_dom_walker (CDI_DOMINATORS)
3644 .walk (fun->cfg->x_entry_block_ptr);
3646 if (dump_file && (dump_flags & TDF_DETAILS))
3648 dump_cand_vec ();
3649 dump_cand_chains ();
3652 delete alt_base_map;
3653 free_affine_expand_cache (&name_expansions);
3655 /* Analyze costs and make appropriate replacements. */
3656 analyze_candidates_and_replace ();
3658 loop_optimizer_finalize ();
3659 delete base_cand_map;
3660 base_cand_map = NULL;
3661 obstack_free (&chain_obstack, NULL);
3662 delete stmt_cand_map;
3663 cand_vec.release ();
3664 obstack_free (&cand_obstack, NULL);
3666 return 0;
3669 } // anon namespace
3671 gimple_opt_pass *
3672 make_pass_strength_reduction (gcc::context *ctxt)
3674 return new pass_strength_reduction (ctxt);